Commit 4b26b50d authored by AUTOMATIC

Merge remote-tracking branch 'origin/master'

parents 254da5d1 b5693699
@@ -10,22 +10,34 @@ function dropReplaceImage( imgWrap, files ) {
     }
     imgWrap.querySelector('.modify-upload button + button, .touch-none + div button + button')?.click();
-    window.requestAnimationFrame( () => {
+    const callback = () => {
         const fileInput = imgWrap.querySelector('input[type="file"]');
         if ( fileInput ) {
             fileInput.files = files;
             fileInput.dispatchEvent(new Event('change'));
         }
-    });
-}
-
-function pressClearBtn(hoverElems) {
-    //Find all buttons hovering over the image box
-    let btns = Array.from(hoverElems.querySelectorAll("button"))
-
-    //Press the last btn which will be the X button
-    if (btns.length)
-        btns[btns.length-1].click()
+    };
+
+    if ( imgWrap.closest('#pnginfo_image') ) {
+        // special treatment for PNG Info tab, wait for fetch request to finish
+        const oldFetch = window.fetch;
+        window.fetch = async (input, options) => {
+            const response = await oldFetch(input, options);
+            if ( 'api/predict/' === input ) {
+                const content = await response.text();
+                window.fetch = oldFetch;
+                window.requestAnimationFrame( () => callback() );
+                return new Response(content, {
+                    status: response.status,
+                    statusText: response.statusText,
+                    headers: response.headers
+                })
+            }
+            return response;
+        };
+    } else {
+        window.requestAnimationFrame( () => callback() );
+    }
 }
 
 window.document.addEventListener('dragover', e => {
@@ -36,13 +48,7 @@ window.document.addEventListener('dragover', e => {
     }
     e.stopPropagation();
     e.preventDefault();
-    e.dataTransfer.dropEffect = 'copy';
-
-    //If is gr.Interface clear image on hover
-    if (target.previousElementSibling)
-        pressClearBtn(target.previousElementSibling)
+    if (e.dataTransfer)
+        e.dataTransfer.dropEffect = 'copy';
 });
 
 window.document.addEventListener('drop', e => {
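Aside on the dragdrop.js change above: the PNG Info branch temporarily wraps window.fetch so the file-replacement callback only runs once the tab's 'api/predict/' request has finished, then restores the original fetch. The sketch below is an illustration of that pattern rather than code from this commit; runAfterRequest and targetUrl are names made up here, and a fresh Response is returned because reading the body consumes the original one.

// Sketch only (assumption, not part of the commit): wrap window.fetch so a
// callback runs after one specific request completes, then restore the original.
function runAfterRequest(targetUrl, callback) {
    const originalFetch = window.fetch;
    window.fetch = async (input, options) => {
        const response = await originalFetch(input, options);
        if (input === targetUrl) {
            const content = await response.text();    // consumes the body stream
            window.fetch = originalFetch;              // stop intercepting
            window.requestAnimationFrame(() => callback());
            // Hand back an equivalent Response, since the original body was read.
            return new Response(content, {
                status: response.status,
                statusText: response.statusText,
                headers: response.headers
            });
        }
        return response;
    };
}

// Hypothetical usage mirroring the diff: react once the PNG Info request is done.
// runAfterRequest('api/predict/', () => console.log('PNG Info request finished'));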
...
@@ -140,8 +140,10 @@ class Options:
     "enable_emphasis": OptionInfo(True, "Use (text) to make model pay more attention to text and [text] to make it pay less attention"),
     "enable_batch_seeds": OptionInfo(True, "Make K-diffusion samplers produce same images in a batch as when making a single image"),
     "save_txt": OptionInfo(False, "Create a text file next to every image with generation parameters."),
-    "GAN_tile": OptionInfo(192, "Tile size for all upscalers. 0 = no tiling.", gr.Slider, {"minimum": 0, "maximum": 512, "step": 16}),
-    "GAN_tile_overlap": OptionInfo(8, "Tile overlap, in pixels for all upscalers. Low values = visible seam.", gr.Slider, {"minimum": 0, "maximum": 48, "step": 1}),
+    "ESRGAN_tile": OptionInfo(192, "Tile size for ESRGAN upscalers. 0 = no tiling.", gr.Slider, {"minimum": 0, "maximum": 512, "step": 16}),
+    "ESRGAN_tile_overlap": OptionInfo(8, "Tile overlap, in pixels for ESRGAN upscalers. Low values = visible seam.", gr.Slider, {"minimum": 0, "maximum": 48, "step": 1}),
+    "SWIN_tile": OptionInfo(192, "Tile size for all SwinIR.", gr.Slider, {"minimum": 16, "maximum": 512, "step": 16}),
+    "SWIN_tile_overlap": OptionInfo(8, "Tile overlap, in pixels for SwinIR. Low values = visible seam.", gr.Slider, {"minimum": 0, "maximum": 48, "step": 1}),
     "random_artist_categories": OptionInfo([], "Allowed categories for random artists selection when using the Roll button", gr.CheckboxGroup, {"choices": artist_db.categories()}),
     "upscale_at_full_resolution_padding": OptionInfo(16, "Inpainting at full resolution: padding, in pixels, for the masked region.", gr.Slider, {"minimum": 0, "maximum": 128, "step": 4}),
     "upscaler_for_hires_fix": OptionInfo(None, "Upscaler for highres. fix", gr.Radio, lambda: {"choices": [x.name for x in sd_upscalers]}),
...
@@ -55,8 +55,8 @@ def load_models(dirname):
 def upscale(
     img,
     model,
-    tile=opts.GAN_tile,
-    tile_overlap=opts.GAN_tile_overlap,
+    tile=opts.SWIN_tile,
+    tile_overlap=opts.SWIN_tile_overlap,
     window_size=8,
     scale=4,
 ):
...
@@ -797,7 +797,7 @@ def create_ui(txt2img, img2img, run_extras, run_pnginfo):
     pnginfo_interface = gr.Interface(
         wrap_gradio_call(run_pnginfo),
         inputs=[
-            gr.Image(label="Source", source="upload", interactive=True, type="pil"),
+            gr.Image(elem_id="pnginfo_image", label="Source", source="upload", interactive=True, type="pil"),
         ],
         outputs=[
             gr.HTML(),
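The elem_id added here is what the dragdrop.js change earlier in this commit keys on: Gradio uses elem_id as the id of the rendered component, so the drag-and-drop script can tell a drop on the PNG Info image apart from any other image drop. A minimal illustration of that check (assumed usage, mirroring imgWrap.closest('#pnginfo_image') in the diff):

// Illustration only: a drop target counts as the PNG Info image when it sits
// inside the element rendered with id "pnginfo_image".
const isPnginfoImage = (node) => node.closest('#pnginfo_image') !== null;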
...