optimize(vc): apply module-style import

This commit is contained in:
源文雨 2024-06-02 00:57:03 +09:00
parent b0c33d1b75
commit 2991e75fc0
7 changed files with 12 additions and 7 deletions

View File

@@ -6,7 +6,7 @@ now_dir = os.getcwd()
sys.path.append(now_dir)
load_dotenv()
load_dotenv("sha256.env")
from infer.modules.vc.modules import VC
from infer.modules.vc import VC
from infer.modules.uvr5.modules import uvr
from infer.lib.train.process_ckpt import (
change_info,

View File

@@ -0,0 +1,3 @@
from .pipeline import Pipeline
from .modules import VC
from .utils import get_index_path_from_model, load_hubert

View File

@@ -1,5 +1,6 @@
import traceback
import logging
import os
logger = logging.getLogger(__name__)
@@ -15,8 +16,9 @@ from infer.lib.infer_pack.models import (
SynthesizerTrnMs768NSFsid,
SynthesizerTrnMs768NSFsid_nono,
)
from infer.modules.vc.pipeline import Pipeline
from infer.modules.vc.utils import *
from .pipeline import Pipeline
from .utils import get_index_path_from_model, load_hubert
class VC:

View File

@@ -144,7 +144,7 @@ class Pipeline(object):
from infer.lib.rmvpe import RMVPE
logger.info(
"Loading rmvpe model,%s" % "%s/rmvpe.pt" % os.environ["rmvpe_root"]
"Loading rmvpe model %s" % "%s/rmvpe.pt" % os.environ["rmvpe_root"]
)
self.model_rmvpe = RMVPE(
"%s/rmvpe.pt" % os.environ["rmvpe_root"],

View File

@@ -13,7 +13,7 @@ from dotenv import load_dotenv
from scipy.io import wavfile
from configs.config import Config
from infer.modules.vc.modules import VC
from infer.modules.vc import VC
def arg_parse() -> tuple:

View File

@@ -8,7 +8,7 @@ from dotenv import load_dotenv
from scipy.io import wavfile
from configs.config import Config
from infer.modules.vc.modules import VC
from infer.modules.vc import VC
####
# USAGE

View File

@@ -7,7 +7,7 @@ from dotenv import load_dotenv
from configs.config import Config
from i18n.i18n import I18nAuto
from infer.modules.vc.modules import VC
from infer.modules.vc import VC
logging.getLogger("numba").setLevel(logging.WARNING)
logging.getLogger("markdown_it").setLevel(logging.WARNING)