From 4b8d47f13ae5804129acfccede8c45813910c51a Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Sun, 23 Jul 2023 01:56:43 +0800
Subject: [PATCH] Format code (#827)

Co-authored-by: github-actions[bot]
---
 gui_v1.py | 26 +++++++++++++++-----------
 1 file changed, 15 insertions(+), 11 deletions(-)

diff --git a/gui_v1.py b/gui_v1.py
index 37db6ce..5cfbc3b 100644
--- a/gui_v1.py
+++ b/gui_v1.py
@@ -302,22 +302,26 @@ if __name__ == "__main__":
                 if event == sg.WINDOW_CLOSED:
                     self.flag_vc = False
                     exit()
-                if event == 'reload_devices':
-                    prev_input = self.window['sg_input_device'].get()
-                    prev_output = self.window['sg_output_device'].get()
+                if event == "reload_devices":
+                    prev_input = self.window["sg_input_device"].get()
+                    prev_output = self.window["sg_output_device"].get()
                     input_devices, output_devices, _, _ = self.get_devices(update=True)
                     if prev_input not in input_devices:
-                        self.config.sg_input_device = input_devices[0]
+                        self.config.sg_input_device = input_devices[0]
                     else:
-                        self.config.sg_input_device = prev_input
-                    self.window['sg_input_device'].Update(values=input_devices)
-                    self.window['sg_input_device'].Update(value=self.config.sg_input_device)
+                        self.config.sg_input_device = prev_input
+                    self.window["sg_input_device"].Update(values=input_devices)
+                    self.window["sg_input_device"].Update(
+                        value=self.config.sg_input_device
+                    )
                     if prev_output not in output_devices:
-                        self.config.sg_output_device = output_devices[0]
+                        self.config.sg_output_device = output_devices[0]
                     else:
-                        self.config.sg_output_device = prev_output
-                    self.window['sg_output_device'].Update(values=output_devices)
-                    self.window['sg_output_device'].Update(value=self.config.sg_output_device)
+                        self.config.sg_output_device = prev_output
+                    self.window["sg_output_device"].Update(values=output_devices)
+                    self.window["sg_output_device"].Update(
+                        value=self.config.sg_output_device
+                    )
                 if event == "start_vc" and self.flag_vc == False:
                     if self.set_values(values) == True:
                         print("using_cuda:" + str(torch.cuda.is_available()))