optimize: no json config writeback

源文雨 2023-09-02 13:53:56 +08:00
parent 3f78b73ec7
commit ad85b02ed9
8 changed files with 85 additions and 61 deletions

View File

@@ -1,6 +1,7 @@
import argparse
import os
import sys
import json
from multiprocessing import cpu_count
import torch
@@ -10,23 +11,13 @@ import logging
logger = logging.getLogger(__name__)
def use_fp32_config():
for config_file in [
version_config_list = [
"v1/32k.json",
"v1/40k.json",
"v1/48k.json",
"v2/48k.json",
"v2/32k.json",
]:
with open(f"configs/{config_file}", "r") as f:
strr = f.read().replace("true", "false")
with open(f"configs/{config_file}", "w") as f:
f.write(strr)
with open("infer/modules/train/preprocess.py", "r") as f:
strr = f.read().replace("3.7", "3.0")
with open("infer/modules/train/preprocess.py", "w") as f:
f.write(strr)
]
def singleton_variable(func):
def wrapper(*args, **kwargs):
@@ -45,6 +36,7 @@ class Config:
self.is_half = True
self.n_cpu = 0
self.gpu_name = None
self.json_config = self.load_config_json()
self.gpu_mem = None
(
self.python_cmd,
@@ -57,6 +49,14 @@ class Config:
self.instead = ""
self.x_pad, self.x_query, self.x_center, self.x_max = self.device_config()
@staticmethod
def load_config_json() -> dict:
d = {}
for config_file in version_config_list:
with open(f"configs/{config_file}", "r") as f:
d[config_file] = json.load(f)
return d
@staticmethod
def arg_parse() -> tuple:
exe = sys.executable or "python"
@@ -102,6 +102,10 @@ class Config:
except Exception:
return False
def use_fp32_config(self):
for config_file in version_config_list:
self.json_config[config_file]["train"]["fp16_run"] = False
def device_config(self) -> tuple:
if torch.cuda.is_available():
i_device = int(self.device.split(":")[-1])
@@ -116,7 +120,7 @@ class Config:
):
logger.info("Found GPU %s, force to fp32", self.gpu_name)
self.is_half = False
use_fp32_config()
self.use_fp32_config()
else:
logger.info("Found GPU %s", self.gpu_name)
self.gpu_mem = int(
@@ -135,12 +139,12 @@ class Config:
logger.info("No supported Nvidia GPU found")
self.device = self.instead = "mps"
self.is_half = False
use_fp32_config()
self.use_fp32_config()
else:
logger.info("No supported Nvidia GPU found")
self.device = self.instead = "cpu"
self.is_half = False
use_fp32_config()
self.use_fp32_config()
if self.n_cpu == 0:
self.n_cpu = cpu_count()
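
Net effect of this hunk: the five training templates are parsed once into Config.json_config, and use_fp32_config() now just flips the cached fp16_run flag instead of rewriting the JSON files (and patching preprocess.py) on disk. A trimmed sketch of the new shape, keeping only the parts shown above:

import json

version_config_list = [
    "v1/32k.json", "v1/40k.json", "v1/48k.json",
    "v2/48k.json", "v2/32k.json",
]

class Config:
    def __init__(self):
        # parse each template once; everything else works on this cache
        self.json_config = self.load_config_json()

    @staticmethod
    def load_config_json() -> dict:
        d = {}
        for config_file in version_config_list:
            with open(f"configs/{config_file}", "r") as f:
                d[config_file] = json.load(f)
        return d

    def use_fp32_config(self):
        # in-memory only: the files under configs/ are left untouched
        for config_file in version_config_list:
            self.json_config[config_file]["train"]["fp16_run"] = False
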

View File

@@ -358,7 +358,7 @@ if __name__ == "__main__":
)
if event == "start_vc" and self.flag_vc == False:
if self.set_values(values) == True:
logger.info("Use CUDA: %b", torch.cuda.is_available())
logger.info("Use CUDA: %s", torch.cuda.is_available())
self.start_vc()
settings = {
"pth_path": values["pth_path"],

View File

@@ -10,6 +10,8 @@ import warnings
from random import shuffle
from subprocess import Popen
from time import sleep
import json
import pathlib
import fairseq
import faiss
@@ -200,20 +202,15 @@ def if_done_multi(done, ps):
done[0] = True
def get_quoted_python_cmd():
return f'"{config.python_cmd}"'
def preprocess_dataset(trainset_dir, exp_dir, sr, n_p):
sr = sr_dict[sr]
os.makedirs("%s/logs/%s" % (now_dir, exp_dir), exist_ok=True)
f = open("%s/logs/%s/preprocess.log" % (now_dir, exp_dir), "w")
f.close()
per = 3.0 if config.is_half else 3.7
cmd = (
get_quoted_python_cmd()
+ ' infer/modules/train/preprocess.py "%s" %s %s "%s/logs/%s" '
% (trainset_dir, sr, n_p, now_dir, exp_dir)
+ str(config.noparallel)
'"%s" infer/modules/train/preprocess.py "%s" %s %s "%s/logs/%s" %s %.1f'
% (config.python_cmd, trainset_dir, sr, n_p, now_dir, exp_dir, config.noparallel, per)
)
logger.info(cmd)
p = Popen(cmd, shell=True) # , stdin=PIPE, stdout=PIPE,stderr=PIPE,cwd=now_dir
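
Instead of rewriting preprocess.py on disk to switch between 3.7 and 3.0, the slice length is now chosen from config.is_half and appended as the last positional argument; the child script reads it from sys.argv[6] (see the preprocess.py diff below). An illustrative expansion with made-up paths and values:

# all concrete values here are examples, not taken from the repo
python_cmd, trainset_dir, sr, n_p = "/usr/bin/python3", "/data/trainset", 40000, 8
now_dir, exp_dir, noparallel = "/workspace/RVC", "mi-test", False
is_half = True                        # stands in for config.is_half
per = 3.0 if is_half else 3.7

cmd = '"%s" infer/modules/train/preprocess.py "%s" %s %s "%s/logs/%s" %s %.1f' % (
    python_cmd, trainset_dir, sr, n_p, now_dir, exp_dir, noparallel, per,
)
print(cmd)
# "/usr/bin/python3" infer/modules/train/preprocess.py "/data/trainset" 40000 8 "/workspace/RVC/logs/mi-test" False 3.0
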
@@ -247,9 +244,9 @@ def extract_f0_feature(gpus, n_p, f0method, if_f0, exp_dir, version19, gpus_rmvp
if if_f0:
if f0method != "rmvpe_gpu":
cmd = (
get_quoted_python_cmd()
+ ' infer/modules/train/extract/extract_f0_print.py "%s/logs/%s" %s %s'
'"%s" infer/modules/train/extract/extract_f0_print.py "%s/logs/%s" %s %s'
% (
config.python_cmd,
now_dir,
exp_dir,
n_p,
@@ -275,7 +272,8 @@ def extract_f0_feature(gpus, n_p, f0method, if_f0, exp_dir, version19, gpus_rmvp
leng = len(gpus_rmvpe)
ps = []
for idx, n_g in enumerate(gpus_rmvpe):
cmd = get_quoted_python_cmd() + ' infer/modules/train/extract/extract_f0_rmvpe.py %s %s %s "%s/logs/%s" %s ' % (
cmd = '"%s" infer/modules/train/extract/extract_f0_rmvpe.py %s %s %s "%s/logs/%s" %s ' % (
config.python_cmd,
leng,
idx,
n_g,
@@ -335,7 +333,8 @@ def extract_f0_feature(gpus, n_p, f0method, if_f0, exp_dir, version19, gpus_rmvp
leng = len(gpus)
ps = []
for idx, n_g in enumerate(gpus):
cmd = get_quoted_python_cmd() + ' infer/modules/train/extract_feature_print.py %s %s %s %s "%s/logs/%s" %s' % (
cmd = '"%s" infer/modules/train/extract_feature_print.py %s %s %s %s "%s/logs/%s" %s' % (
config.python_cmd,
config.device,
leng,
idx,
@@ -516,8 +515,18 @@ def click_train(
logger.info("No pretrained Generator")
if pretrained_D15 == "":
logger.info("No pretrained Discriminator")
if version19 == "v1" or sr2 == "40k":
config_path = "v1/%s.json" % sr2
else:
config_path = "v2/%s.json" % sr2
config_save_path = os.path.join(exp_dir, "config.json")
if not pathlib.Path(config_save_path).exists():
with open(config_save_path, "w", encoding="utf-8") as f:
json.dump(config.json_config[config_path], f, ensure_ascii=False, indent=4, sort_keys=True)
f.write("\n")
if gpus16:
cmd = get_quoted_python_cmd() + ' infer/modules/train/train.py -e "%s" -sr %s -f0 %s -bs %s -g %s -te %s -se %s %s %s -l %s -c %s -sw %s -v %s' % (
cmd = '"%s" infer/modules/train/train.py -e "%s" -sr %s -f0 %s -bs %s -g %s -te %s -se %s %s %s -l %s -c %s -sw %s -v %s' % (
config.python_cmd,
exp_dir1,
sr2,
1 if if_f0_3 else 0,
@@ -534,9 +543,9 @@ def click_train(
)
else:
cmd = (
config.python_cmd
+ ' infer/modules/train/train.py -e "%s" -sr %s -f0 %s -bs %s -te %s -se %s %s %s -l %s -c %s -sw %s -v %s'
'"%s" infer/modules/train/train.py -e "%s" -sr %s -f0 %s -bs %s -te %s -se %s %s %s -l %s -c %s -sw %s -v %s'
% (
config.python_cmd,
exp_dir1,
sr2,
1 if if_f0_3 else 0,
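
click_train now also takes over writing logs/<experiment>/config.json (previously handled inside get_hparams(), see the diff further down), dumping it once from the in-memory config.json_config cache, keyed by the same relative paths used in version_config_list above. The key selection is small but worth spelling out, since 40k only ships as a v1 template; pick_config_path below is just an illustrative wrapper around the branch shown in the hunk:

def pick_config_path(version19: str, sr2: str) -> str:
    # mirrors the branch in click_train: v2 + 40k still falls back to the v1 template
    if version19 == "v1" or sr2 == "40k":
        return "v1/%s.json" % sr2
    return "v2/%s.json" % sr2

assert pick_config_path("v1", "48k") == "v1/48k.json"
assert pick_config_path("v2", "32k") == "v2/32k.json"
assert pick_config_path("v2", "40k") == "v1/40k.json"   # no v2/40k.json exists
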

View File

@@ -5,7 +5,7 @@ import logging
import os
import subprocess
import sys
import traceback
import shutil
import numpy as np
import torch
@@ -297,7 +297,6 @@ def get_hparams(init=True):
-c is no longer needed
"""
parser = argparse.ArgumentParser()
# parser.add_argument('-c', '--config', type=str, default="configs/40k.json",help='JSON file for configuration')
parser.add_argument(
"-se",
"--save_every_epoch",
@@ -360,23 +359,9 @@ def get_hparams(init=True):
name = args.experiment_dir
experiment_dir = os.path.join("./logs", args.experiment_dir)
if not os.path.exists(experiment_dir):
os.makedirs(experiment_dir)
if args.version == "v1" or args.sample_rate == "40k":
config_path = "configs/v1/%s.json" % args.sample_rate
else:
config_path = "configs/v2/%s.json" % args.sample_rate
config_save_path = os.path.join(experiment_dir, "config.json")
if init:
with open(config_path, "r") as f:
data = f.read()
with open(config_save_path, "w") as f:
f.write(data)
else:
with open(config_save_path, "r") as f:
data = f.read()
config = json.loads(data)
config = json.load(f)
hparams = HParams(**config)
hparams.model_dir = hparams.experiment_dir = experiment_dir
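
With the template-copy step removed, get_hparams() only reads back whatever click_train saved into the experiment folder and wraps it in HParams. A minimal stand-alone sketch (SimpleNamespace stands in for the repo's HParams wrapper, and the experiment name is illustrative):

import json
import os
from types import SimpleNamespace

experiment_dir = os.path.join("./logs", "mi-test")            # illustrative name
config_save_path = os.path.join(experiment_dir, "config.json")

# the file is expected to exist already, written by click_train before training starts
with open(config_save_path, "r") as f:
    config = json.load(f)

hparams = SimpleNamespace(**config)    # the real code builds HParams(**config)
hparams.model_dir = hparams.experiment_dir = experiment_dir
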

View File

@@ -12,6 +12,7 @@ sr = int(sys.argv[2])
n_p = int(sys.argv[3])
exp_dir = sys.argv[4]
noparallel = sys.argv[5] == "True"
per = float(sys.argv[6])
import multiprocessing
import os
import traceback
@@ -36,7 +37,7 @@ def println(strr):
class PreProcess:
def __init__(self, sr, exp_dir):
def __init__(self, sr, exp_dir, per=3.7):
self.slicer = Slicer(
sr=sr,
threshold=-42,
@@ -47,7 +48,7 @@
)
self.sr = sr
self.bh, self.ah = signal.butter(N=5, Wn=48, btype="high", fs=self.sr)
self.per = 3.0
self.per = per
self.overlap = 0.3
self.tail = self.per + self.overlap
self.max = 0.9
@@ -134,8 +135,8 @@ class PreProcess:
println("Fail. %s" % traceback.format_exc())
def preprocess_trainset(inp_root, sr, n_p, exp_dir):
pp = PreProcess(sr, exp_dir)
def preprocess_trainset(inp_root, sr, n_p, exp_dir, per):
pp = PreProcess(sr, exp_dir, per)
println("start preprocess")
println(sys.argv)
pp.pipeline_mp_inp_dir(inp_root, n_p)
@@ -143,4 +144,4 @@ def preprocess_trainset(inp_root, sr, n_p, exp_dir):
if __name__ == "__main__":
preprocess_trainset(inp_root, sr, n_p, exp_dir)
preprocess_trainset(inp_root, sr, n_p, exp_dir, per)
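
On the receiving side the script no longer hard-codes per = 3.0: it arrives as the sixth positional argument and is threaded through preprocess_trainset into PreProcess, where it also drives the slice tail (per + overlap). The argv layout, with example values:

# example command-line layout for the reworked script (values are illustrative)
argv = [
    "infer/modules/train/preprocess.py",
    "/data/trainset",                 # [1] inp_root
    "40000",                          # [2] sr
    "8",                              # [3] n_p
    "/workspace/RVC/logs/mi-test",    # [4] exp_dir
    "False",                          # [5] noparallel
    "3.0",                            # [6] per (3.0 for half precision, else 3.7)
]

per = float(argv[6])
noparallel = argv[5] == "True"
tail = per + 0.3                      # PreProcess keeps per + overlap seconds per slice
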

View File

@@ -210,12 +210,12 @@ class VC:
if self.tgt_sr != resample_sr >= 16000:
self.tgt_sr = resample_sr
index_info = (
"Using index:%s." % file_index
"Index:\n%s." % file_index
if os.path.exists(file_index)
else "Index not used."
)
return (
f"Success.\n {index_info}\nTime:\n npy:{times[0]}s, f0:{times[1]}s, infer:{times[2]}s",
"Success.\n%s\nTime:\nnpy: %.2fs, f0: %.2fs, infer: %.2fs." % (index_info, *times),
(self.tgt_sr, audio_opt),
)
except:
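
The rewritten return message is a single %-format string that unpacks the three timing values straight into the argument tuple and trims them to two decimals. With made-up numbers:

index_info = "Index:\n/some/added.index."     # illustrative index path
times = [0.51, 1.27, 3.04]                    # npy, f0, infer timings (made up)
print("Success.\n%s\nTime:\nnpy: %.2fs, f0: %.2fs, infer: %.2fs." % (index_info, *times))
# Success.
# Index:
# /some/added.index.
# Time:
# npy: 0.51s, f0: 1.27s, infer: 3.04s.
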

run.sh
View File

@@ -53,5 +53,9 @@ fi
# Download models
./tools/dlmodels.sh
if [[ $? -ne 0 ]]; then
exit 1
fi
# Run the main script
python3 infer-web.py --pycmd python3

View File

@@ -39,6 +39,7 @@ VR_DeEchoAggressive="VR-DeEchoAggressive.pth"
VR_DeEchoDeReverb="VR-DeEchoDeReverb.pth"
VR_DeEchoNormal="VR-DeEchoNormal.pth"
onnx_dereverb="vocals.onnx"
rmvpe="rmvpe.pt"
dlhp2_all="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/HP2_all_vocals.pth"
dlhp3_all="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/HP3_all_vocals.pth"
@@ -47,6 +48,7 @@ dlVR_DeEchoAggressive="https://huggingface.co/lj1995/VoiceConversionWebUI/resolv
dlVR_DeEchoDeReverb="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/VR-DeEchoDeReverb.pth"
dlVR_DeEchoNormal="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/VR-DeEchoNormal.pth"
dlonnx_dereverb="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/onnx_dereverb_By_FoxJoy/vocals.onnx"
dlrmvpe="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/rmvpe.pt"
hb="hubert_base.pt"
@@ -523,6 +525,25 @@ else
fi
fi
echo checking $rmvpe
if [ -f "./assets/rmvpe/$rmvpe" ]; then
echo $rmvpe in ./assets/rmvpe checked.
else
echo failed. starting download from huggingface.
if command -v aria2c &> /dev/null; then
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M $dlrmvpe -d ./assets/rmvpe -o $rmvpe
if [ -f "./assets/rmvpe/$rmvpe" ]; then
echo download successful.
else
echo please try again!
exit 1
fi
else
echo aria2c command not found. Please install aria2c and try again.
exit 1
fi
fi
echo checking $hb
if [ -f "./assets/hubert/$hb" ]; then
echo $hb in ./assets/hubert/pretrained checked.
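
The new rmvpe.pt block above requires aria2c and exits if it is not installed. As a rough, hypothetical stand-in (not part of the repo, single connection, no resume or retry), the same step could be done from Python with urllib:

import os
import urllib.request

url = "https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/rmvpe.pt"
dest = "./assets/rmvpe/rmvpe.pt"

if os.path.isfile(dest):
    print("rmvpe.pt in ./assets/rmvpe checked.")
else:
    os.makedirs(os.path.dirname(dest), exist_ok=True)
    urllib.request.urlretrieve(url, dest)    # blocking download, no retry logic
    print("download successful.")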