Mirror of https://github.com/RVC-Project/Retrieval-based-Voice-Conversion-WebUI.git (synced 2025-02-06 21:52:50 +08:00)

optimize: no json config writeback

commit ad85b02ed9
parent 3f78b73ec7
@@ -1,6 +1,7 @@
 import argparse
 import os
 import sys
+import json
 from multiprocessing import cpu_count
 
 import torch
@@ -10,23 +11,13 @@ import logging
 logger = logging.getLogger(__name__)
 
 
-def use_fp32_config():
-    for config_file in [
-        "v1/32k.json",
-        "v1/40k.json",
-        "v1/48k.json",
-        "v2/48k.json",
-        "v2/32k.json",
-    ]:
-        with open(f"configs/{config_file}", "r") as f:
-            strr = f.read().replace("true", "false")
-        with open(f"configs/{config_file}", "w") as f:
-            f.write(strr)
-    with open("infer/modules/train/preprocess.py", "r") as f:
-        strr = f.read().replace("3.7", "3.0")
-    with open("infer/modules/train/preprocess.py", "w") as f:
-        f.write(strr)
+version_config_list = [
+    "v1/32k.json",
+    "v1/40k.json",
+    "v1/48k.json",
+    "v2/48k.json",
+    "v2/32k.json",
+]
 
 
 def singleton_variable(func):
     def wrapper(*args, **kwargs):
@@ -45,6 +36,7 @@ class Config:
         self.is_half = True
         self.n_cpu = 0
         self.gpu_name = None
+        self.json_config = self.load_config_json()
         self.gpu_mem = None
         (
             self.python_cmd,
@@ -57,6 +49,14 @@ class Config:
         self.instead = ""
         self.x_pad, self.x_query, self.x_center, self.x_max = self.device_config()
 
+    @staticmethod
+    def load_config_json() -> dict:
+        d = {}
+        for config_file in version_config_list:
+            with open(f"configs/{config_file}", "r") as f:
+                d[config_file] = json.load(f)
+        return d
+
     @staticmethod
     def arg_parse() -> tuple:
         exe = sys.executable or "python"
@@ -102,6 +102,10 @@ class Config:
         except Exception:
             return False
 
+    def use_fp32_config(self):
+        for config_file in version_config_list:
+            self.json_config[config_file]["train"]["fp16_run"] = False
+
     def device_config(self) -> tuple:
         if torch.cuda.is_available():
             i_device = int(self.device.split(":")[-1])
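Taken together, the Config changes above replace the on-disk write-back with an in-memory cache: every versioned training JSON is read once into Config.json_config, and forcing fp32 now only flips the cached fp16_run flags instead of rewriting the files under configs/ and patching preprocess.py. A minimal stand-alone sketch of that pattern (the class name and the two-entry file list are simplified stand-ins, not the project's code):

```python
import json

# Simplified stand-in for version_config_list; the real list has five entries.
VERSION_CONFIG_LIST = ["v1/40k.json", "v2/48k.json"]


class InMemoryConfig:
    """Toy equivalent of the diff's Config: load once, mutate in memory only."""

    def __init__(self):
        self.json_config = self.load_config_json()

    @staticmethod
    def load_config_json() -> dict:
        d = {}
        for config_file in VERSION_CONFIG_LIST:
            with open(f"configs/{config_file}", "r") as f:
                d[config_file] = json.load(f)
        return d

    def use_fp32_config(self):
        # No write-back: only the cached dicts change; configs/*.json stay untouched.
        for config_file in VERSION_CONFIG_LIST:
            self.json_config[config_file]["train"]["fp16_run"] = False
```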
@@ -116,7 +120,7 @@ class Config:
             ):
                 logger.info("Found GPU %s, force to fp32", self.gpu_name)
                 self.is_half = False
-                use_fp32_config()
+                self.use_fp32_config()
             else:
                 logger.info("Found GPU %s", self.gpu_name)
             self.gpu_mem = int(
@@ -135,12 +139,12 @@ class Config:
             logger.info("No supported Nvidia GPU found")
             self.device = self.instead = "mps"
             self.is_half = False
-            use_fp32_config()
+            self.use_fp32_config()
         else:
             logger.info("No supported Nvidia GPU found")
             self.device = self.instead = "cpu"
             self.is_half = False
-            use_fp32_config()
+            self.use_fp32_config()
 
         if self.n_cpu == 0:
             self.n_cpu = cpu_count()
@@ -358,7 +358,7 @@ if __name__ == "__main__":
                     )
                 if event == "start_vc" and self.flag_vc == False:
                     if self.set_values(values) == True:
-                        logger.info("Use CUDA: %b", torch.cuda.is_available())
+                        logger.info("Use CUDA: %s", torch.cuda.is_available())
                         self.start_vc()
                         settings = {
                             "pth_path": values["pth_path"],
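The %b fix in this hunk is unrelated to the config cache but worth noting: printf-style formatting in Python has no %b conversion, so the old call made the logging module report "ValueError: unsupported format character 'b'" instead of the intended message; %s formats the boolean fine. A quick illustration (plain logging setup, not project code):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

logger.info("Use CUDA: %s", True)   # logs "Use CUDA: True"
logger.info("Use CUDA: %b", True)   # prints "--- Logging error ---" with
                                    # ValueError: unsupported format character 'b'
```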
infer-web.py (39 changed lines)
@@ -10,6 +10,8 @@ import warnings
 from random import shuffle
 from subprocess import Popen
 from time import sleep
+import json
+import pathlib
 
 import fairseq
 import faiss
@@ -200,20 +202,15 @@ def if_done_multi(done, ps):
         done[0] = True
 
 
-def get_quoted_python_cmd():
-    return f'"{config.python_cmd}"'
-
-
 def preprocess_dataset(trainset_dir, exp_dir, sr, n_p):
     sr = sr_dict[sr]
     os.makedirs("%s/logs/%s" % (now_dir, exp_dir), exist_ok=True)
     f = open("%s/logs/%s/preprocess.log" % (now_dir, exp_dir), "w")
     f.close()
+    per = 3.0 if config.is_half else 3.7
     cmd = (
-        get_quoted_python_cmd()
-        + ' infer/modules/train/preprocess.py "%s" %s %s "%s/logs/%s" '
-        % (trainset_dir, sr, n_p, now_dir, exp_dir)
-        + str(config.noparallel)
+        '"%s" infer/modules/train/preprocess.py "%s" %s %s "%s/logs/%s" %s %.1f'
+        % (config.python_cmd, trainset_dir, sr, n_p, now_dir, exp_dir, config.noparallel, per)
     )
     logger.info(cmd)
     p = Popen(cmd, shell=True)  # , stdin=PIPE, stdout=PIPE,stderr=PIPE,cwd=now_dir
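preprocess_dataset now derives the slice length per from the precision setting (3.0 s when half precision is active, 3.7 s otherwise) and passes it as an extra positional argument, instead of relying on the old use_fp32_config() patching the hard-coded value inside preprocess.py. A rough sketch of the command string this builds, with illustrative values standing in for the WebUI inputs:

```python
# Illustrative values; the real ones come from the WebUI form and Config.
python_cmd = "python"
trainset_dir, sr, n_p = "/data/trainset", 40000, 8
now_dir, exp_dir, noparallel, is_half = "/rvc", "my-exp", False, True

per = 3.0 if is_half else 3.7
cmd = (
    '"%s" infer/modules/train/preprocess.py "%s" %s %s "%s/logs/%s" %s %.1f'
    % (python_cmd, trainset_dir, sr, n_p, now_dir, exp_dir, noparallel, per)
)
print(cmd)
# "python" infer/modules/train/preprocess.py "/data/trainset" 40000 8 "/rvc/logs/my-exp" False 3.0
```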
@@ -247,9 +244,9 @@ def extract_f0_feature(gpus, n_p, f0method, if_f0, exp_dir, version19, gpus_rmvp
     if if_f0:
         if f0method != "rmvpe_gpu":
             cmd = (
-                get_quoted_python_cmd()
-                + ' infer/modules/train/extract/extract_f0_print.py "%s/logs/%s" %s %s'
+                '"%s" infer/modules/train/extract/extract_f0_print.py "%s/logs/%s" %s %s'
                 % (
+                    config.python_cmd,
                     now_dir,
                     exp_dir,
                     n_p,
@@ -275,7 +272,8 @@ def extract_f0_feature(gpus, n_p, f0method, if_f0, exp_dir, version19, gpus_rmvp
             leng = len(gpus_rmvpe)
             ps = []
             for idx, n_g in enumerate(gpus_rmvpe):
-                cmd = get_quoted_python_cmd() + ' infer/modules/train/extract/extract_f0_rmvpe.py %s %s %s "%s/logs/%s" %s ' % (
+                cmd = '"%s" infer/modules/train/extract/extract_f0_rmvpe.py %s %s %s "%s/logs/%s" %s ' % (
+                    config.python_cmd,
                     leng,
                     idx,
                     n_g,
@@ -335,7 +333,8 @@ def extract_f0_feature(gpus, n_p, f0method, if_f0, exp_dir, version19, gpus_rmvp
     leng = len(gpus)
     ps = []
     for idx, n_g in enumerate(gpus):
-        cmd = get_quoted_python_cmd() + ' infer/modules/train/extract_feature_print.py %s %s %s %s "%s/logs/%s" %s' % (
+        cmd = '"%s" infer/modules/train/extract_feature_print.py %s %s %s %s "%s/logs/%s" %s' % (
+            config.python_cmd,
             config.device,
             leng,
             idx,
@@ -516,8 +515,18 @@ def click_train(
         logger.info("No pretrained Generator")
     if pretrained_D15 == "":
         logger.info("No pretrained Discriminator")
+    if version19 == "v1" or sr2 == "40k":
+        config_path = "v1/%s.json" % sr2
+    else:
+        config_path = "v2/%s.json" % sr2
+    config_save_path = os.path.join(exp_dir, "config.json")
+    if not pathlib.Path(config_save_path).exists():
+        with open(config_save_path, "w", encoding="utf-8") as f:
+            json.dump(config.json_config[config_path], f, ensure_ascii=False, indent=4, sort_keys=True)
+            f.write("\n")
     if gpus16:
-        cmd = get_quoted_python_cmd() + ' infer/modules/train/train.py -e "%s" -sr %s -f0 %s -bs %s -g %s -te %s -se %s %s %s -l %s -c %s -sw %s -v %s' % (
+        cmd = '"%s" infer/modules/train/train.py -e "%s" -sr %s -f0 %s -bs %s -g %s -te %s -se %s %s %s -l %s -c %s -sw %s -v %s' % (
+            config.python_cmd,
             exp_dir1,
             sr2,
             1 if if_f0_3 else 0,
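Since the trainer no longer copies a template config, click_train is now where the per-experiment config.json gets materialized: it picks the v1 or v2 JSON matching the chosen version and sample rate, and dumps the cached dict from config.json_config, but only if the file does not already exist. A stand-alone sketch of that write-once step (paths and dict contents are made up for illustration):

```python
import json
import os
import pathlib

# Made-up cache; in the diff this is config.json_config keyed by "v1/..." / "v2/..." paths.
json_config = {"v2/48k.json": {"train": {"fp16_run": True}, "data": {"sampling_rate": 48000}}}

version19, sr2, exp_dir = "v2", "48k", "./logs/my-exp"  # illustrative values
config_path = "v1/%s.json" % sr2 if version19 == "v1" or sr2 == "40k" else "v2/%s.json" % sr2

os.makedirs(exp_dir, exist_ok=True)
config_save_path = os.path.join(exp_dir, "config.json")
if not pathlib.Path(config_save_path).exists():
    # Write once; later runs reuse the existing per-experiment config.
    with open(config_save_path, "w", encoding="utf-8") as f:
        json.dump(json_config[config_path], f, ensure_ascii=False, indent=4, sort_keys=True)
        f.write("\n")
```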
@@ -534,9 +543,9 @@ def click_train(
         )
     else:
         cmd = (
-            config.python_cmd
-            + ' infer/modules/train/train.py -e "%s" -sr %s -f0 %s -bs %s -te %s -se %s %s %s -l %s -c %s -sw %s -v %s'
+            '"%s" infer/modules/train/train.py -e "%s" -sr %s -f0 %s -bs %s -te %s -se %s %s %s -l %s -c %s -sw %s -v %s'
             % (
+                config.python_cmd,
                 exp_dir1,
                 sr2,
                 1 if if_f0_3 else 0,
@@ -5,7 +5,7 @@ import logging
 import os
 import subprocess
 import sys
-import traceback
+import shutil
 
 import numpy as np
 import torch
@@ -297,7 +297,6 @@ def get_hparams(init=True):
     -c不要了
     """
     parser = argparse.ArgumentParser()
-    # parser.add_argument('-c', '--config', type=str, default="configs/40k.json",help='JSON file for configuration')
     parser.add_argument(
         "-se",
         "--save_every_epoch",
@@ -360,23 +359,9 @@ def get_hparams(init=True):
     name = args.experiment_dir
     experiment_dir = os.path.join("./logs", args.experiment_dir)
 
-    if not os.path.exists(experiment_dir):
-        os.makedirs(experiment_dir)
-
-    if args.version == "v1" or args.sample_rate == "40k":
-        config_path = "configs/v1/%s.json" % args.sample_rate
-    else:
-        config_path = "configs/v2/%s.json" % args.sample_rate
     config_save_path = os.path.join(experiment_dir, "config.json")
-    if init:
-        with open(config_path, "r") as f:
-            data = f.read()
-        with open(config_save_path, "w") as f:
-            f.write(data)
-    else:
-        with open(config_save_path, "r") as f:
-            data = f.read()
-    config = json.loads(data)
+    with open(config_save_path, "r") as f:
+        config = json.load(f)
 
     hparams = HParams(**config)
     hparams.model_dir = hparams.experiment_dir = experiment_dir
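On the training side, get_hparams drops the init branch that used to copy a template from configs/ into the experiment directory; it now only reads the config.json that the WebUI (click_train above) has already written, so nothing in the training path writes JSON back. A minimal sketch of the resulting read path; HParams here is a toy attribute-access wrapper standing in for the project's class:

```python
import json
import os


class HParams(dict):
    """Toy stand-in: attribute-style access over a plain dict."""
    __getattr__ = dict.__getitem__
    __setattr__ = dict.__setitem__


def get_hparams(experiment_dir="./logs/my-exp"):  # simplified signature
    config_save_path = os.path.join(experiment_dir, "config.json")
    with open(config_save_path, "r") as f:
        config = json.load(f)  # assumes click_train already wrote this file
    hparams = HParams(**config)
    hparams.model_dir = hparams.experiment_dir = experiment_dir
    return hparams
```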
@@ -12,6 +12,7 @@ sr = int(sys.argv[2])
 n_p = int(sys.argv[3])
 exp_dir = sys.argv[4]
 noparallel = sys.argv[5] == "True"
+per = float(sys.argv[6])
 import multiprocessing
 import os
 import traceback
@@ -36,7 +37,7 @@ def println(strr):
 
 
 class PreProcess:
-    def __init__(self, sr, exp_dir):
+    def __init__(self, sr, exp_dir, per=3.7):
         self.slicer = Slicer(
             sr=sr,
             threshold=-42,
@@ -47,7 +48,7 @@ class PreProcess:
         )
         self.sr = sr
         self.bh, self.ah = signal.butter(N=5, Wn=48, btype="high", fs=self.sr)
-        self.per = 3.0
+        self.per = per
         self.overlap = 0.3
         self.tail = self.per + self.overlap
         self.max = 0.9
@@ -134,8 +135,8 @@ class PreProcess:
         println("Fail. %s" % traceback.format_exc())
 
 
-def preprocess_trainset(inp_root, sr, n_p, exp_dir):
-    pp = PreProcess(sr, exp_dir)
+def preprocess_trainset(inp_root, sr, n_p, exp_dir, per):
+    pp = PreProcess(sr, exp_dir, per)
     println("start preprocess")
     println(sys.argv)
     pp.pipeline_mp_inp_dir(inp_root, n_p)
@@ -143,4 +144,4 @@ def preprocess_trainset(inp_root, sr, n_p, exp_dir):
 
 
 if __name__ == "__main__":
-    preprocess_trainset(inp_root, sr, n_p, exp_dir)
+    preprocess_trainset(inp_root, sr, n_p, exp_dir, per)
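The preprocess.py half of the same change: the slice length is no longer the hard-coded self.per = 3.0 that the old use_fp32_config() maintained by rewriting 3.7 to 3.0 on disk; it now arrives as a sixth command-line argument and is threaded through preprocess_trainset into PreProcess. A condensed sketch of that flow, assuming the argv layout shown in the diff:

```python
import sys


class PreProcess:
    """Trimmed to the fields relevant here; the real class also slices and filters audio."""

    def __init__(self, sr, exp_dir, per=3.7):
        self.sr = sr
        self.exp_dir = exp_dir
        self.per = per                      # was hard-coded to 3.0 before this commit
        self.overlap = 0.3
        self.tail = self.per + self.overlap


def preprocess_trainset(inp_root, sr, n_p, exp_dir, per):
    pp = PreProcess(sr, exp_dir, per)
    print("per =", pp.per, "tail =", pp.tail)


if __name__ == "__main__":
    # argv layout assumed from the diff:
    # preprocess.py <inp_root> <sr> <n_p> <exp_dir> <noparallel> <per>
    inp_root = sys.argv[1]
    sr = int(sys.argv[2])
    n_p = int(sys.argv[3])
    exp_dir = sys.argv[4]
    noparallel = sys.argv[5] == "True"
    per = float(sys.argv[6])                # new sixth argument: slice length in seconds
    preprocess_trainset(inp_root, sr, n_p, exp_dir, per)
```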
@@ -210,12 +210,12 @@ class VC:
             if self.tgt_sr != resample_sr >= 16000:
                 self.tgt_sr = resample_sr
             index_info = (
-                "Using index:%s." % file_index
+                "Index:\n%s." % file_index
                 if os.path.exists(file_index)
                 else "Index not used."
             )
             return (
-                f"Success.\n {index_info}\nTime:\n npy:{times[0]}s, f0:{times[1]}s, infer:{times[2]}s",
+                "Success.\n%s\nTime:\nnpy: %.2fs, f0: %.2fs, infer: %.2fs." % (index_info, *times),
                 (self.tgt_sr, audio_opt),
             )
         except:
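The success message in this hunk also switches from an f-string to %-formatting with the timing tuple unpacked in place, so each stage duration is printed to two decimals instead of full float precision. A quick illustration with made-up timings:

```python
index_info = "Index not used."
times = (0.1234, 1.98765, 0.5)  # made-up npy / f0 / infer durations in seconds

msg = "Success.\n%s\nTime:\nnpy: %.2fs, f0: %.2fs, infer: %.2fs." % (index_info, *times)
print(msg)
# Success.
# Index not used.
# Time:
# npy: 0.12s, f0: 1.99s, infer: 0.50s.
```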
run.sh (4 changed lines)
@@ -53,5 +53,9 @@ fi
 # Download models
 ./tools/dlmodels.sh
 
+if [[ $? -ne 0 ]]; then
+    exit 1
+fi
+
 # Run the main script
 python3 infer-web.py --pycmd python3
@@ -39,6 +39,7 @@ VR_DeEchoAggressive="VR-DeEchoAggressive.pth"
 VR_DeEchoDeReverb="VR-DeEchoDeReverb.pth"
 VR_DeEchoNormal="VR-DeEchoNormal.pth"
 onnx_dereverb="vocals.onnx"
+rmvpe="rmvpe.pt"
 
 dlhp2_all="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/HP2_all_vocals.pth"
 dlhp3_all="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/HP3_all_vocals.pth"
@@ -47,6 +48,7 @@ dlVR_DeEchoAggressive="https://huggingface.co/lj1995/VoiceConversionWebUI/resolv
 dlVR_DeEchoDeReverb="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/VR-DeEchoDeReverb.pth"
 dlVR_DeEchoNormal="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/VR-DeEchoNormal.pth"
 dlonnx_dereverb="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/uvr5_weights/onnx_dereverb_By_FoxJoy/vocals.onnx"
+dlrmvpe="https://huggingface.co/lj1995/VoiceConversionWebUI/resolve/main/rmvpe.pt"
 
 hb="hubert_base.pt"
 
@@ -523,6 +525,25 @@ else
     fi
 fi
 
+echo checking $rmvpe
+if [ -f "./assets/rmvpe/$rmvpe" ]; then
+    echo $rmvpe in ./assets/rmvpe checked.
+else
+    echo failed. starting download from huggingface.
+    if command -v aria2c &> /dev/null; then
+        aria2c --console-log-level=error -c -x 16 -s 16 -k 1M $dlrmvpe -d ./assets/rmvpe -o $rmvpe
+        if [ -f "./assets/rmvpe/$rmvpe" ]; then
+            echo download successful.
+        else
+            echo please try again!
+            exit 1
+        fi
+    else
+        echo aria2c command not found. Please install aria2c and try again.
+        exit 1
+    fi
+fi
+
 echo checking $hb
 if [ -f "./assets/hubert/$hb" ]; then
     echo $hb in ./assets/hubert/pretrained checked.