Mirror of https://github.com/RVC-Project/Retrieval-based-Voice-Conversion-WebUI.git (synced 2025-05-06 20:01:37 +08:00)
Fix memory doesn't unload on mps device (#2035)
* Solve the cache not being cleared on mps
* Fix protect not to be a dict

parent fb16ba36b9
commit d255818097
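The fix applies the same pattern at every cleanup site: after the model objects are deleted, the cache of whichever accelerator backend is active is emptied so the memory is actually released back to the system. A minimal sketch of that pattern as a standalone helper (the helper name and the hasattr guard for older PyTorch builds are assumptions, not part of the commit):

import torch

def empty_accelerator_cache():
    """Release cached allocator memory on whichever backend is active."""
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
    elif torch.backends.mps.is_available():
        # torch.mps.empty_cache() is available in recent PyTorch releases;
        # the guard below is a defensive assumption for older builds.
        if hasattr(torch.mps, "empty_cache"):
            torch.mps.empty_cache()

Checking CUDA first mirrors the diff below; on machines with neither backend the call is simply a no-op.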
@@ -105,4 +105,7 @@ def uvr(model_name, inp_root, save_root_vocal, paths, save_root_ins, agg, format
         if torch.cuda.is_available():
             torch.cuda.empty_cache()
             logger.info("Executed torch.cuda.empty_cache()")
+        elif torch.backends.mps.is_available():
+            torch.mps.empty_cache()
+            logger.info("Executed torch.mps.empty_cache()")
     yield "\n".join(infos)
@@ -62,6 +62,8 @@ class VC:
                 ) = None
                 if torch.cuda.is_available():
                     torch.cuda.empty_cache()
+                elif torch.backends.mps.is_available():
+                    torch.mps.empty_cache()
                 ###楼下不这么折腾清理不干净
                 self.if_f0 = self.cpt.get("f0", 1)
                 self.version = self.cpt.get("version", "v1")
@@ -82,18 +84,12 @@ class VC:
                 del self.net_g, self.cpt
                 if torch.cuda.is_available():
                     torch.cuda.empty_cache()
+                elif torch.backends.mps.is_available():
+                    torch.mps.empty_cache()
                 return (
                     {"visible": False, "__type__": "update"},
-                    {
-                        "visible": True,
-                        "value": to_return_protect0,
-                        "__type__": "update",
-                    },
-                    {
-                        "visible": True,
-                        "value": to_return_protect1,
-                        "__type__": "update",
-                    },
+                    to_return_protect0,
+                    to_return_protect1,
                     "",
                     "",
                 )
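The second half of this hunk is the "protect not to be a dict" part of the fix. Assuming to_return_protect0 and to_return_protect1 are already built as Gradio-style update dicts earlier in get_vc (as in the current repository), wrapping them in another update dict put a dict where the slider's numeric value belongs; returning them directly passes the value through. A rough illustration of the difference (names and values here are illustrative only):

# Built earlier in get_vc (assumed): already a Gradio update payload.
to_return_protect0 = {"visible": True, "value": 0.33, "__type__": "update"}

# Before the fix: the payload was nested, so the component's "value"
# became a dict instead of a float.
broken = {"visible": True, "value": to_return_protect0, "__type__": "update"}

# After the fix: the payload is returned as-is and "value" stays 0.33.
fixed = to_return_protect0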
@@ -291,6 +291,8 @@ class Pipeline(object):
         del feats, p_len, padding_mask
         if torch.cuda.is_available():
             torch.cuda.empty_cache()
+        elif torch.backends.mps.is_available():
+            torch.mps.empty_cache()
         t2 = ttime()
         times[0] += t1 - t0
         times[2] += t2 - t1
@@ -472,4 +474,6 @@ class Pipeline(object):
         del pitch, pitchf, sid
         if torch.cuda.is_available():
             torch.cuda.empty_cache()
+        elif torch.backends.mps.is_available():
+            torch.mps.empty_cache()
         return audio_opt