Mirror of https://github.com/AUTOMATIC1111/stable-diffusion-webui.git (synced 2024-12-29 19:05:05 +08:00).
Commit df62ffbd25: Merge branch 'dev' into avoid-isfiles.
@ -206,7 +206,7 @@ def parse_prompts(prompts):
|
||||
return res, extra_data
|
||||
|
||||
|
||||
def get_user_metadata(filename):
|
||||
def get_user_metadata(filename, lister=None):
|
||||
if filename is None:
|
||||
return {}
|
||||
|
||||
@ -215,10 +215,10 @@ def get_user_metadata(filename):
|
||||
|
||||
metadata = {}
|
||||
try:
|
||||
with open(metadata_filename, "r", encoding="utf8") as file:
|
||||
metadata = json.load(file)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
exists = lister.exists(metadata_filename) if lister else os.path.exists(metadata_filename)
|
||||
if exists:
|
||||
with open(metadata_filename, "r", encoding="utf8") as file:
|
||||
metadata = json.load(file)
|
||||
except Exception as e:
|
||||
errors.display(e, f"reading extra network user metadata from {metadata_filename}")
|
||||
|
||||
|
@ -3,7 +3,7 @@ import os.path
|
||||
import urllib.parse
|
||||
from pathlib import Path
|
||||
|
||||
from modules import shared, ui_extra_networks_user_metadata, errors, extra_networks
|
||||
from modules import shared, ui_extra_networks_user_metadata, errors, extra_networks, util
|
||||
from modules.images import read_info_from_image, save_image_with_geninfo
|
||||
import gradio as gr
|
||||
import json
|
||||
@ -107,13 +107,14 @@ class ExtraNetworksPage:
|
||||
self.allow_negative_prompt = False
|
||||
self.metadata = {}
|
||||
self.items = {}
|
||||
self.lister = util.MassFileLister()
|
||||
|
||||
def refresh(self):
|
||||
pass
|
||||
|
||||
def read_user_metadata(self, item):
|
||||
filename = item.get("filename", None)
|
||||
metadata = extra_networks.get_user_metadata(filename)
|
||||
metadata = extra_networks.get_user_metadata(filename, lister=self.lister)
|
||||
|
||||
desc = metadata.get("description", None)
|
||||
if desc is not None:
|
||||
@ -123,7 +124,7 @@ class ExtraNetworksPage:
|
||||
|
||||
def link_preview(self, filename):
|
||||
quoted_filename = urllib.parse.quote(filename.replace('\\', '/'))
|
||||
mtime = os.path.getmtime(filename)
|
||||
mtime, _ = self.lister.mctime(filename)
|
||||
return f"./sd_extra_networks/thumb?filename={quoted_filename}&mtime={mtime}"
|
||||
|
||||
def search_terms_from_path(self, filename, possible_directories=None):
|
||||
@ -137,6 +138,8 @@ class ExtraNetworksPage:
|
||||
return ""
|
||||
|
||||
def create_html(self, tabname):
|
||||
self.lister.reset()
|
||||
|
||||
items_html = ''
|
||||
|
||||
self.metadata = {}
|
||||
@ -282,10 +285,10 @@ class ExtraNetworksPage:
|
||||
List of default keys used for sorting in the UI.
|
||||
"""
|
||||
pth = Path(path)
|
||||
stat = pth.stat()
|
||||
mtime, ctime = self.lister.mctime(path)
|
||||
return {
|
||||
"date_created": int(stat.st_ctime or 0),
|
||||
"date_modified": int(stat.st_mtime or 0),
|
||||
"date_created": int(mtime),
|
||||
"date_modified": int(ctime),
|
||||
"name": pth.name.lower(),
|
||||
"path": str(pth.parent).lower(),
|
||||
}
|
||||
@ -298,7 +301,7 @@ class ExtraNetworksPage:
|
||||
potential_files = sum([[path + "." + ext, path + ".preview." + ext] for ext in allowed_preview_extensions()], [])
|
||||
|
||||
for file in potential_files:
|
||||
if os.path.isfile(file):
|
||||
if self.lister.exists(file):
|
||||
return self.link_preview(file)
|
||||
|
||||
return None
|
||||
@ -308,6 +311,9 @@ class ExtraNetworksPage:
|
||||
Find and read a description file for a given path (without extension).
|
||||
"""
|
||||
for file in [f"{path}.txt", f"{path}.description.txt"]:
|
||||
if not self.lister.exists(file):
|
||||
continue
|
||||
|
||||
try:
|
||||
with open(file, "r", encoding="utf-8", errors="replace") as f:
|
||||
return f.read()
|
||||
@ -417,21 +423,21 @@ def create_ui(interface: gr.Blocks, unrelated_tabs, tabname):
|
||||
|
||||
dropdown_sort.change(fn=lambda: None, _js="function(){ applyExtraNetworkSort('" + tabname + "'); }")
|
||||
|
||||
def create_html():
|
||||
ui.pages_contents = [pg.create_html(ui.tabname) for pg in ui.stored_extra_pages]
|
||||
|
||||
def pages_html():
|
||||
if not ui.pages_contents:
|
||||
return refresh()
|
||||
|
||||
create_html()
|
||||
return ui.pages_contents
|
||||
|
||||
def refresh():
|
||||
for pg in ui.stored_extra_pages:
|
||||
pg.refresh()
|
||||
|
||||
ui.pages_contents = [pg.create_html(ui.tabname) for pg in ui.stored_extra_pages]
|
||||
|
||||
create_html()
|
||||
return ui.pages_contents
|
||||
|
||||
interface.load(fn=pages_html, inputs=[], outputs=[*ui.pages])
|
||||
interface.load(fn=pages_html, inputs=[], outputs=ui.pages)
|
||||
button_refresh.click(fn=refresh, inputs=[], outputs=ui.pages)
|
||||
|
||||
return ui
|
||||
|
@ -30,7 +30,7 @@ def torch_bgr_to_pil_image(tensor: torch.Tensor) -> Image.Image:
|
||||
# TODO: is `tensor.float().cpu()...numpy()` the most efficient idiom?
|
||||
arr = tensor.float().cpu().clamp_(0, 1).numpy() # clamp
|
||||
arr = 255.0 * np.moveaxis(arr, 0, 2) # CHW to HWC, rescale
|
||||
arr = arr.astype(np.uint8)
|
||||
arr = arr.round().astype(np.uint8)
|
||||
arr = arr[:, :, ::-1] # flip BGR to RGB
|
||||
return Image.fromarray(arr, "RGB")
|
||||
|
||||
|
@ -66,3 +66,73 @@ def truncate_path(target_path, base_path=cwd):
|
||||
except ValueError:
|
||||
pass
|
||||
return abs_target
|
||||
|
||||
|
||||
class MassFileListerCachedDir:
    """Caches the result of one os.scandir() pass over a directory, so later
    per-file lookups need no further filesystem calls."""

    def __init__(self, dirname):
        # Both maps are filled from a single scandir() pass below:
        self.files = None        # lowercased name -> (name, mtime, ctime), for case-insensitive lookup
        self.files_cased = None  # exact-case name -> (name, mtime, ctime)
        self.dirname = dirname

        # One stat per directory entry; symlinks are not followed so a dangling
        # link still yields an entry rather than an error.
        entries = [(entry.name, entry.stat(follow_symlinks=False)) for entry in os.scandir(self.dirname)]
        records = [(name, st.st_mtime, st.st_ctime) for name, st in entries]

        self.files = {record[0].lower(): record for record in records}
        self.files_cased = {record[0]: record for record in records}
|
||||
|
||||
|
||||
class MassFileLister:
    """A class that provides a way to check for the existence and mtime/ctime of files without doing more than one stat call per file."""

    def __init__(self):
        # maps directory path -> MassFileListerCachedDir, or None if the directory could not be scanned
        self.cached_dirs = {}

    def find(self, path):
        """
        Find the metadata for a file at the given path.

        Returns:
            tuple or None: A tuple of (name, mtime, ctime) if the file exists, or None if it does not.
        """

        dirname, filename = os.path.split(path)

        if dirname not in self.cached_dirs:
            try:
                self.cached_dirs[dirname] = MassFileListerCachedDir(dirname)
            except OSError:
                # Missing/unreadable directory: remember the failure so we don't rescan on
                # every call, and report its files as nonexistent (matches os.path.exists,
                # which returns False rather than raising for a path in a missing directory).
                self.cached_dirs[dirname] = None

        cached_dir = self.cached_dirs[dirname]
        if cached_dir is None:
            return None

        # Fast path: exact-case match against the cached scandir results.
        stats = cached_dir.files_cased.get(filename)
        if stats is not None:
            return stats

        # The name matches only case-insensitively; on a case-insensitive filesystem the
        # file may still exist under different casing, so fall back to a single stat call.
        stats = cached_dir.files.get(filename.lower())
        if stats is None:
            return None

        try:
            os_stats = os.stat(path, follow_symlinks=False)
            return filename, os_stats.st_mtime, os_stats.st_ctime
        except Exception:
            return None

    def exists(self, path):
        """Check if a file exists at the given path."""

        return self.find(path) is not None

    def mctime(self, path):
        """
        Get the modification and creation times for a file at the given path.

        Returns:
            tuple: A tuple of (mtime, ctime) if the file exists, or (0, 0) if it does not.
        """

        stats = self.find(path)
        return (0, 0) if stats is None else stats[1:3]

    def reset(self):
        """Clear the cache of all directories."""

        self.cached_dirs.clear()
|
||||
|
Loading…
Reference in New Issue
Block a user