mirror of
https://github.com/RVC-Project/Retrieval-based-Voice-Conversion-WebUI.git
synced 2025-03-09 23:34:54 +08:00
Add files via upload
This commit is contained in:
parent
3b5a2298d7
commit
404ce9338f
@@ -51,9 +51,9 @@ def spectrogram_torch(y, n_fft, sampling_rate, hop_size, win_size, center=False)
         :: (B, Freq, Frame) - Linear-frequency Linear-amplitude spectrogram
     """
     # Validation
-    if torch.min(y) < -1.0:
+    if torch.min(y) < -1.07:
         print("min value is ", torch.min(y))
-    if torch.max(y) > 1.0:
+    if torch.max(y) > 1.07:
         print("max value is ", torch.max(y))
 
     # Window - Cache if needed
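The check on the waveform's dynamic range is loosened from ±1.0 to ±1.07, so samples that overshoot slightly (e.g. after resampling) no longer trigger the warning. A minimal standalone sketch of the widened check; the helper name and the limit argument are illustrative, not from the repo:

import torch

# Hypothetical helper mirroring the widened validation above: only samples
# beyond +/-1.07 are reported.
def check_waveform_range(y, limit=1.07):
    if torch.min(y) < -limit:
        print("min value is ", torch.min(y))
    if torch.max(y) > limit:
        print("max value is ", torch.max(y))

check_waveform_range(torch.clamp(torch.randn(1, 16000), -1.05, 1.05))  # prints nothing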
@@ -2,7 +2,7 @@ import torch, traceback, os, pdb
 from collections import OrderedDict
 
 
-def savee(ckpt, sr, if_f0, name, epoch):
+def savee(ckpt, sr, if_f0, name, epoch, version):
     try:
         opt = OrderedDict()
         opt["weight"] = {}
@@ -76,6 +76,7 @@ def savee(ckpt, sr, if_f0, name, epoch):
         opt["info"] = "%sepoch" % epoch
         opt["sr"] = sr
         opt["f0"] = if_f0
+        opt["version"] = version
         torch.save(opt, "weights/%s.pth" % name)
         return "Success."
     except:
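savee() now takes a version argument and persists it with the other metadata. A hedged sketch of the checkpoint layout it writes, using only the keys visible in this diff; the metadata values are placeholders, and in the real code "weight" holds the trimmed generator state_dict:

import os
import torch
from collections import OrderedDict

os.makedirs("weights", exist_ok=True)   # the repo saves small models under weights/
opt = OrderedDict()
opt["weight"] = {}                      # trimmed generator weights in the real code
opt["info"] = "%sepoch" % 200           # placeholder epoch count
opt["sr"] = "40k"                       # placeholder sample rate
opt["f0"] = 1                           # 1 = trained with pitch guidance
opt["version"] = "v2"                   # field added by this commit
torch.save(opt, "weights/example.pth")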
@@ -85,16 +86,17 @@ def savee(ckpt, sr, if_f0, name, epoch):
 def show_info(path):
     try:
         a = torch.load(path, map_location="cpu")
-        return "模型信息:%s\n采样率:%s\n模型是否输入音高引导:%s" % (
+        return "模型信息:%s\n采样率:%s\n模型是否输入音高引导:%s\n版本:%s" % (
             a.get("info", "None"),
             a.get("sr", "None"),
             a.get("f0", "None"),
+            a.get("version", "None"),
         )
     except:
         return traceback.format_exc()
 
 
-def extract_small_model(path, name, sr, if_f0, info):
+def extract_small_model(path, name, sr, if_f0, info, version):
     try:
         ckpt = torch.load(path, map_location="cpu")
         if "model" in ckpt:
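show_info() now reports the version as well. Its Chinese template translates to "Model info: %s\nSample rate: %s\nWhether the model takes pitch guidance: %s\nVersion: %s". A hedged inspection sketch that reads the same keys directly; the path assumes the checkpoint written in the sketch above:

import torch

a = torch.load("weights/example.pth", map_location="cpu")  # assumed path
for key in ("info", "sr", "f0", "version"):
    print(key, "=", a.get(key, "None"))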
@@ -171,6 +173,7 @@ def extract_small_model(path, name, sr, if_f0, info):
         if info == "":
             info = "Extracted model."
         opt["info"] = info
+        opt["version"] = version
         opt["sr"] = sr
         opt["f0"] = int(if_f0)
         torch.save(opt, "weights/%s.pth" % name)
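extract_small_model() gains the same version parameter and stores it in the slimmed-down checkpoint. A hedged call sketch; the import path, checkpoint path, and metadata values are all assumptions, and per the diff an empty info string is replaced with "Extracted model.":

from process_ckpt import extract_small_model  # assumed import path

# Placeholder training checkpoint; on success this writes weights/exp.pth.
print(extract_small_model("logs/exp/G_2333.pth", "exp", "40k", 1, "", "v2"))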
@@ -191,7 +194,7 @@ def change_info(path, info, name):
         return traceback.format_exc()
 
 
-def merge(path1, path2, alpha1, sr, f0, info, name):
+def merge(path1, path2, alpha1, sr, f0, info, name, version):
     try:
 
         def extract(ckpt):
@@ -241,6 +244,7 @@ def merge(path1, path2, alpha1, sr, f0, info, name):
         """
         opt["sr"] = sr
         opt["f0"] = 1 if f0 == "是" else 0
+        opt["version"] = version
         opt["info"] = info
         torch.save(opt, "weights/%s.pth" % name)
         return "Success."
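In merge(), the pitch-guidance flag arrives as the UI's radio-button label: "是" means "yes" (and "否" "no"), and it is serialized to 1/0 before saving. A minimal sketch of that mapping; the helper name is illustrative:

def f0_flag(choice):
    # "是" = "yes"; anything else, e.g. "否" = "no", maps to 0.
    return 1 if choice == "是" else 0

assert f0_flag("是") == 1
assert f0_flag("否") == 0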
@@ -284,8 +284,8 @@ def get_hparams(init=True):
     bs (batch size) done
     pretrainG, pretrainD done
     GPU id: os.environ["CUDA_VISIBLE_DEVICES"] done
-    if_latest todo
-    model: if_f0 todo
+    if_latest done
+    model: if_f0 done
     sample rate: auto-select config done
     whether to cache the dataset in GPU: if_cache_data_in_gpu done
@@ -321,6 +321,12 @@ def get_hparams(init=True):
     parser.add_argument(
         "-sr", "--sample_rate", type=str, required=True, help="sample rate, 32k/40k/48k"
     )
+    parser.add_argument(
+        "-sw", "--save_every_weights", type=str, default="0", help="save the extracted model in weights directory when saving checkpoints"
+    )
+    parser.add_argument(
+        "-v", "--version", type=str, required=True, help="model version"
+    )
     parser.add_argument(
         "-f0",
         "--if_f0",
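Two new CLI flags are registered: -sw/--save_every_weights and -v/--version. A self-contained sketch showing how they parse; the argument values below are illustrative:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-sw", "--save_every_weights", type=str, default="0",
                    help="save the extracted model in weights directory when saving checkpoints")
parser.add_argument("-v", "--version", type=str, required=True, help="model version")
args = parser.parse_args(["-v", "v2", "-sw", "1"])
print(args.version, args.save_every_weights)  # -> v2 1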
@@ -369,11 +375,13 @@ def get_hparams(init=True):
     hparams.total_epoch = args.total_epoch
     hparams.pretrainG = args.pretrainG
     hparams.pretrainD = args.pretrainD
+    hparams.version = args.version
     hparams.gpus = args.gpus
     hparams.train.batch_size = args.batch_size
     hparams.sample_rate = args.sample_rate
     hparams.if_f0 = args.if_f0
     hparams.if_latest = args.if_latest
+    hparams.save_every_weights = args.save_every_weights
     hparams.if_cache_data_in_gpu = args.if_cache_data_in_gpu
     hparams.data.training_files = "%s/filelist.txt" % experiment_dir
     return hparams
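Finally, the two parsed arguments are copied onto the hparams object so the training loop can read them. A hedged sketch of that plumbing, with SimpleNamespace standing in for both the parsed args and the repo's hparams container:

from types import SimpleNamespace

args = SimpleNamespace(version="v2", save_every_weights="1")  # stand-in for argparse result
hparams = SimpleNamespace()
hparams.version = args.version                        # added by this commit
hparams.save_every_weights = args.save_every_weights  # added by this commit
print(hparams.version, hparams.save_every_weights)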