Mirror of https://github.com/AUTOMATIC1111/stable-diffusion-webui.git, synced 2025-01-06 15:15:05 +08:00
57aaa068bb
* By default, upscaling will loop up to 3 times to upscale the incoming image to the target dimensions. This is necessary, as upscaling models work in fixed increments (x4 is common).
* For very small images, such as those generated by ADetailer for inpainting, this can result in additional upscaling steps, which can be expensive. Usually the incoming image is only off by a small amount, so it can be preferable to do a minor upscale via Lanczos before the main upscaling step.
* We introduce an optional value in the upscaler settings to allow this minor upscale when the incoming image falls below a certain threshold relative to the fixed scaling value of the upscaler model. By default, this setting is 1.0, which effectively disables it.
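To make the new setting concrete, here is a minimal, self-contained sketch of the prescale decision described above. The helper name plan_prescale and the example numbers are illustrative only, and the aspect-ratio correction performed by the real code is omitted for brevity; the actual logic lives in Upscaler.upscale() in the file below and reads its threshold from the upscaler_fast_prescale_threshold option.

import math

def plan_prescale(src_w, src_h, scale, model_scale, threshold):
    """Return an intermediate (w, h) to Lanczos-resize to first, or None to skip."""
    # Target dimensions, rounded down to a multiple of 8 as in Upscaler.upscale().
    dest_w = int((src_w * scale) // 8 * 8)
    dest_h = int((src_h * scale) // 8 * 8)
    if threshold <= 1 or scale <= model_scale:
        return None  # feature disabled, or a single model pass already suffices
    # Smallest intermediate size from which one fixed-increment pass reaches the target.
    inter_w = math.ceil(dest_w / model_scale)
    inter_h = math.ceil(dest_h / model_scale)
    scale_diff = max(inter_w / src_w, inter_h / src_h)
    # Only prescale when the cheap resize stays within the configured threshold.
    return (inter_w, inter_h) if scale_diff <= threshold else None

# Example: a 128x128 ADetailer crop upscaled x5 with a x4 model and a threshold of 1.5
# gives plan_prescale(128, 128, 5, 4, 1.5) == (160, 160): one small Lanczos resize plus
# a single x4 model pass reaches 640x640, avoiding a second, expensive model pass.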
186 lines, 6.0 KiB, Python
import os
from abc import abstractmethod

import PIL
from PIL import Image

import modules.shared
from modules import modelloader, shared
import math

LANCZOS = (Image.Resampling.LANCZOS if hasattr(Image, 'Resampling') else Image.LANCZOS)
NEAREST = (Image.Resampling.NEAREST if hasattr(Image, 'Resampling') else Image.NEAREST)


class Upscaler:
    name = None
    model_path = None
    model_name = None
    model_url = None
    enable = True
    filter = None
    model = None
    user_path = None
    scalers: list
    tile = True

    def __init__(self, create_dirs=False):
        self.mod_pad_h = None
        self.tile_size = modules.shared.opts.ESRGAN_tile
        self.tile_pad = modules.shared.opts.ESRGAN_tile_overlap
        self.device = modules.shared.device
        self.img = None
        self.output = None
        self.scale = 1
        self.half = not modules.shared.cmd_opts.no_half
        self.pre_pad = 0
        self.mod_scale = None
        self.model_download_path = None

        if self.model_path is None and self.name:
            self.model_path = os.path.join(shared.models_path, self.name)
        if self.model_path and create_dirs:
            os.makedirs(self.model_path, exist_ok=True)

        try:
            import cv2  # noqa: F401
            self.can_tile = True
        except Exception:
            pass

    @abstractmethod
    def do_upscale(self, img: PIL.Image, selected_model: str):
        return img

    def upscale(self, img: PIL.Image, scale, selected_model: str = None):
        self.scale = scale
        dest_w = int((img.width * scale) // 8 * 8)
        dest_h = int((img.height * scale) // 8 * 8)

        # Attempt a cheap resize of the source image, if it falls below the fixed scaling size of the upscaling model.
        # We resize the image by the smallest amount necessary for the fixed scaling to meet the target dimensions.

        prescale_threshold = modules.shared.opts.upscaler_fast_prescale_threshold
        if prescale_threshold > 1 and self.name and self.name not in ["Nearest", "Lanczos"]:

            # Get the matching upscaler
            upscaler_data = next((x for x in self.scalers if x.data_path == selected_model), None)

            if upscaler_data is not None:
                upscaler_scale = upscaler_data.scale
                if scale > upscaler_scale:

                    # Calculate the minimum intermediate dimensions.
                    min_intermediate_w = math.ceil(dest_w / upscaler_scale)
                    min_intermediate_h = math.ceil(dest_h / upscaler_scale)

                    # Preserve aspect ratio and make sure any adjustments don't drop us below the
                    # minimum scaling needed.
                    aspect_ratio = img.width / img.height

                    intermediate_w = max(min_intermediate_w, int(math.ceil(min_intermediate_h * aspect_ratio)))
                    intermediate_h = max(min_intermediate_h, int(math.ceil(min_intermediate_w / aspect_ratio)))

                    if intermediate_w / aspect_ratio > intermediate_h:
                        intermediate_w = int(math.ceil(intermediate_h * aspect_ratio))
                    else:
                        intermediate_h = int(math.ceil(intermediate_w / aspect_ratio))

                    scale_diff = max(intermediate_w / img.width, intermediate_h / img.height)

                    if scale_diff <= prescale_threshold:
                        img = img.resize((intermediate_w, intermediate_h), resample=LANCZOS)

        for i in range(3):
            if img.width >= dest_w and img.height >= dest_h and (i > 0 or scale != 1):
                break

            if shared.state.interrupted:
                break

            shape = (img.width, img.height)

            img = self.do_upscale(img, selected_model)

            if shape == (img.width, img.height):
                break

        if img.width != dest_w or img.height != dest_h:
            img = img.resize((int(dest_w), int(dest_h)), resample=LANCZOS)

        return img

    @abstractmethod
    def load_model(self, path: str):
        pass

    def find_models(self, ext_filter=None) -> list:
        return modelloader.load_models(model_path=self.model_path, model_url=self.model_url, command_path=self.user_path, ext_filter=ext_filter)

    def update_status(self, prompt):
        print(f"\nextras: {prompt}", file=shared.progress_print_out)

class UpscalerData:
    name = None
    data_path = None
    scale: int = 4
    scaler: Upscaler = None
    model = None

    def __init__(self, name: str, path: str, upscaler: Upscaler = None, scale: int = 4, model=None):
        self.name = name
        self.data_path = path
        self.local_data_path = path
        self.scaler = upscaler
        self.scale = scale
        self.model = model

    def __repr__(self):
        return f"<UpscalerData name={self.name} path={self.data_path} scale={self.scale}>"

class UpscalerNone(Upscaler):
    name = "None"
    scalers = []

    def load_model(self, path):
        pass

    def do_upscale(self, img, selected_model=None):
        return img

    def __init__(self, dirname=None):
        super().__init__(False)
        self.scalers = [UpscalerData("None", None, self)]


class UpscalerLanczos(Upscaler):
    scalers = []

    def do_upscale(self, img, selected_model=None):
        return img.resize((int(img.width * self.scale), int(img.height * self.scale)), resample=LANCZOS)

    def load_model(self, _):
        pass

    def __init__(self, dirname=None):
        super().__init__(False)
        self.name = "Lanczos"
        self.scalers = [UpscalerData("Lanczos", None, self)]


class UpscalerNearest(Upscaler):
    scalers = []

    def do_upscale(self, img, selected_model=None):
        return img.resize((int(img.width * self.scale), int(img.height * self.scale)), resample=NEAREST)

    def load_model(self, _):
        pass

    def __init__(self, dirname=None):
        super().__init__(False)
        self.name = "Nearest"
        self.scalers = [UpscalerData("Nearest", None, self)]