From da9a02049f18a9b328e3d19f5e86c0a469ea56ae Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Tue, 23 Apr 2024 16:22:22 +0900
Subject: [PATCH] chore(format): run black on dev (#2007)

Co-authored-by: github-actions[bot]
---
 infer/modules/vc/pipeline.py | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/infer/modules/vc/pipeline.py b/infer/modules/vc/pipeline.py
index 88950b0..371836b 100644
--- a/infer/modules/vc/pipeline.py
+++ b/infer/modules/vc/pipeline.py
@@ -164,12 +164,17 @@ class Pipeline(object):
 
                 logger.info("Loading fcpe model")
                 self.model_fcpe = spawn_bundled_infer_model(self.device)
-            f0 = self.model_fcpe.infer(
-                torch.from_numpy(x).to(self.device).unsqueeze(0).float(),
-                sr=16000,
-                decoder_mode="local_argmax",
-                threshold=0.006,
-            ).squeeze().cpu().numpy()
+            f0 = (
+                self.model_fcpe.infer(
+                    torch.from_numpy(x).to(self.device).unsqueeze(0).float(),
+                    sr=16000,
+                    decoder_mode="local_argmax",
+                    threshold=0.006,
+                )
+                .squeeze()
+                .cpu()
+                .numpy()
+            )
         f0 *= pow(2, f0_up_key / 12)
         # with open("test.txt","w")as f:f.write("\n".join([str(i)for i in f0.tolist()]))