run_git(dir, name, f'remote set-url origin "{url}"', None, f"Failed to set {name}'s origin URL", live=False)
run_git(dir, name, 'fetch', f"Fetching updates for {name}...", f"Couldn't fetch {name}", autofix=False)
run_git(dir, name, f'checkout {commithash}', f"Checking out commit for {name} with hash: {commithash}...", f"Couldn't checkout commit {commithash} for {name}", live=True)
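# Illustrative sketch only (an assumption, not the repository's actual helper): a
# run_git(dir, name, command, desc, errdesc, ...) wrapper like the calls above could
# delegate to `git -C <dir> <command>` via subprocess, printing desc before running
# and raising errdesc when git exits with a non-zero status.
import subprocess

def run_git_sketch(dir, name, command, desc=None, errdesc=None, live=False):
    if desc:
        print(desc)
    result = subprocess.run(f'git -C "{dir}" {command}', shell=True, capture_output=not live, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"{errdesc or f'Error running git command for {name}'} (exit code {result.returncode})")
    return result.stdout or ""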
refiner_checkpoint = gr.Dropdown(label='Checkpoint', elem_id=self.elem_id("checkpoint"), choices=sd_models.checkpoint_tiles(), value='', tooltip="switch to another model in the middle of generation")
refiner_switch_at = gr.Slider(value=0.8, label="Switch at", minimum=0.01, maximum=1.0, step=0.01, elem_id=self.elem_id("switch_at"), tooltip="fraction of sampling steps when the switch to refiner model should happen; 1=never, 0.5=switch in the middle of generation")
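# Minimal sketch (an assumption, not the script's actual logic) of the switch_at
# semantics described in the tooltip above: once the given fraction of sampling
# steps has completed, generation continues with the refiner checkpoint; 1.0 = never.
def should_switch_to_refiner(step: int, total_steps: int, refiner_checkpoint: str, switch_at: float) -> bool:
    if refiner_checkpoint in ("", "None") or switch_at >= 1.0:
        return False
    return step / max(total_steps, 1) >= switch_at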
@@ -100,7 +105,7 @@ class CFGDenoiser(torch.nn.Module):
assert not is_edit_model or all(len(conds) == 1 for conds in conds_list), "AND is not supported for InstructPix2Pix checkpoint (unless using Image CFG scale = 1.0)"
"upcast_attn":OptionInfo(False,"Upcast cross attention layer to float32"),
"randn_source":OptionInfo("GPU","Random number generator source.",gr.Radio,{"choices":["GPU","CPU","NV"]}).info("changes seeds drastically; use CPU to produce the same picture across different videocard vendors; use NV to produce same picture as on NVidia videocards"),
"tiling":OptionInfo(False,"Tiling",infotext='Tiling').info("produce a tileable picture"),
"sd_refiner_checkpoint":OptionInfo("None","Refiner checkpoint",gr.Dropdown,lambda:{"choices":["None"]+shared_items.list_checkpoint_tiles()},refresh=shared_items.refresh_checkpoints,infotext="Refiner").info("switch to another model in the middle of generation"),
"sd_refiner_switch_at":OptionInfo(1.0,"Refiner switch at",gr.Slider,{"minimum":0.01,"maximum":1.0,"step":0.01},infotext='Refiner switch at').info("fraction of sampling steps when the swtch to refiner model should happen; 1=never, 0.5=switch in the middle of generation"),
"hide_samplers":OptionInfo([],"Hide samplers in user interface",gr.CheckboxGroup,lambda:{"choices":[x.nameforxinshared_items.list_samplers()]}).needs_reload_ui(),
"eta_ddim":OptionInfo(0.0,"Eta for DDIM",gr.Slider,{"minimum":0.0,"maximum":1.0,"step":0.01},infotext='Eta DDIM').info("noise multiplier; higher = more unperdictable results"),
"eta_ancestral":OptionInfo(1.0,"Eta for ancestral samplers",gr.Slider,{"minimum":0.0,"maximum":1.0,"step":0.01},infotext='Eta').info("noise multiplier; applies to Euler a and other samplers that have a in them"),
"eta_ancestral":OptionInfo(1.0,"Eta for k-diffusion samplers",gr.Slider,{"minimum":0.0,"maximum":1.0,"step":0.01},infotext='Eta').info("noise multiplier; currently only applies to ancestral samplers (i.e. Euler a) and SDE samplers"),
"ddim_discretize":OptionInfo('uniform',"img2img DDIM discretize",gr.Radio,{"choices":['uniform','quad']}),
's_churn': OptionInfo(0.0, "sigma churn", gr.Slider, {"minimum": 0.0, "maximum": 100.0, "step": 0.01}, infotext='Sigma churn').info('amount of stochasticity; only applies to Euler, Heun, and DPM2'),
's_tmin': OptionInfo(0.0, "sigma tmin", gr.Slider, {"minimum": 0.0, "maximum": 10.0, "step": 0.01}, infotext='Sigma tmin').info('enable stochasticity; start value of the sigma range; only applies to Euler, Heun, and DPM2'),
's_tmax': OptionInfo(0.0, "sigma tmax", gr.Slider, {"minimum": 0.0, "maximum": 999.0, "step": 0.01}, infotext='Sigma tmax').info("0 = inf; end value of the sigma range; only applies to Euler, Heun, and DPM2"),
's_noise': OptionInfo(1.0, "sigma noise", gr.Slider, {"minimum": 0.0, "maximum": 1.1, "step": 0.001}, infotext='Sigma noise').info('amount of additional noise to counteract loss of detail during sampling; only applies to Euler, Heun, and DPM2'),
's_noise': OptionInfo(1.0, "sigma noise", gr.Slider, {"minimum": 0.0, "maximum": 1.1, "step": 0.001}, infotext='Sigma noise').info('amount of additional noise to counteract loss of detail during sampling'),
'k_sched_type': OptionInfo("Automatic", "Scheduler type", gr.Dropdown, {"choices": ["Automatic", "karras", "exponential", "polyexponential"]}, infotext='Schedule type').info("lets you override the noise schedule for k-diffusion samplers; choosing Automatic disables the three parameters below"),
'sigma_min': OptionInfo(0.0, "sigma min", gr.Number, infotext='Schedule min sigma').info("0 = default (~0.03); minimum noise strength for k-diffusion noise scheduler"),
'sigma_max': OptionInfo(0.0, "sigma max", gr.Number, infotext='Schedule max sigma').info("0 = default (~14.6); maximum noise strength for k-diffusion noise scheduler"),
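# Minimal sketch (assumption): how the sigma_min / sigma_max overrides above might be
# resolved, with 0 falling back to the scheduler defaults mentioned in the info text.
def resolve_sigma_range(opt_sigma_min: float, opt_sigma_max: float, default_min: float = 0.03, default_max: float = 14.6):
    sigma_min = opt_sigma_min if opt_sigma_min > 0 else default_min
    sigma_max = opt_sigma_max if opt_sigma_max > 0 else default_max
    return sigma_min, sigma_max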
hr_checkpoint_name = gr.Dropdown(label='Hires checkpoint', elem_id="hr_checkpoint", choices=["Use same checkpoint"] + modules.sd_models.checkpoint_tiles(use_short=True), value="Use same checkpoint")
create_refresh_button(hr_checkpoint_name, modules.sd_models.list_models, lambda: {"choices": ["Use same checkpoint"] + modules.sd_models.checkpoint_tiles(use_short=True)}, "hr_checkpoint_refresh")
hr_sampler_name = gr.Dropdown(label='Hires sampling method', elem_id="hr_sampler", choices=["Use same sampler"] + sd_samplers.visible_sampler_names(), value="Use same sampler")
hr_prompt = gr.Textbox(label="Hires prompt", elem_id="hires_prompt", show_label=False, lines=3, placeholder="Prompt for hires fix pass.\nLeave empty to use the same prompt as in first pass.", elem_classes=["prompt"])
with gr.Column(scale=80):
    with gr.Row():
        hr_negative_prompt = gr.Textbox(label="Hires negative prompt", elem_id="hires_neg_prompt", show_label=False, lines=3, placeholder="Negative prompt for hires fix pass.\nLeave empty to use the same negative prompt as in first pass.", elem_classes=["prompt"])
if isinstance(x, gr.Textbox) and field == 'value':  # due to an undesirable behavior of gr.Textbox, if you give it an int value instead of str, everything dies
assert grid_mp < opts.img_max_size_mp, f'Error: Resulting grid would be too large ({grid_mp} MPixels) (max configured size is {opts.img_max_size_mp} MPixels)'
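# Illustrative sketch (assumption about how grid_mp could be derived): the guard above
# compares the megapixel count of the assembled grid against the img_max_size_mp limit.
def grid_megapixels(cell_count: int, cell_width: int, cell_height: int) -> int:
    return round(cell_count * cell_width * cell_height / 1_000_000)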
...
...
@@ -720,7 +752,7 @@ class Script(scripts.Script):
# Auto-save main and sub-grids:
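# when z_count > 1 this is the combined main grid plus one sub-grid per z value (z_count + 1); otherwise only the single grid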
grid_count = z_count + 1 if z_count > 1 else 1
for g in range(grid_count):
# TODO: See previous comment about intentional data misalignment.