Mirror of https://github.com/AUTOMATIC1111/stable-diffusion-webui.git (synced 2024-12-29 02:45:05 +08:00)
as per wfjsw's suggestion, revert changes for sd_hijack_checkpoint.py
parent ad229fae43
commit 603509ec90
@@ -4,19 +4,16 @@ import ldm.modules.attention
 import ldm.modules.diffusionmodules.openaimodel


-# Setting flag=False so that torch skips checking parameters.
-# parameters checking is expensive in frequent operations.
-
 def BasicTransformerBlock_forward(self, x, context=None):
-    return checkpoint(self._forward, x, context, flag=False)
+    return checkpoint(self._forward, x, context)


 def AttentionBlock_forward(self, x):
-    return checkpoint(self._forward, x, flag=False)
+    return checkpoint(self._forward, x)


 def ResBlock_forward(self, x, emb):
-    return checkpoint(self._forward, x, emb, flag=False)
+    return checkpoint(self._forward, x, emb)


 stored = []
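The hijacked forwards restored by this revert wrap each block's _forward in a checkpoint call, so intermediate activations are recomputed during the backward pass instead of being held in memory. Below is a minimal, self-contained sketch of that pattern. It assumes checkpoint here is torch.utils.checkpoint.checkpoint (the import sits above this hunk and is not shown), and TinyBlock is a made-up stand-in module, not part of the webui code.

# Minimal sketch of the gradient-checkpointing pattern used by the
# hijacked forwards above. Assumption: checkpoint == torch.utils.checkpoint.checkpoint.
import torch
from torch.utils.checkpoint import checkpoint


class TinyBlock(torch.nn.Module):  # hypothetical stand-in for BasicTransformerBlock etc.
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(16, 16)

    def _forward(self, x):
        return torch.relu(self.linear(x))

    def forward(self, x):
        # Activations inside _forward are not stored during the forward pass;
        # they are recomputed in the backward pass, trading compute for memory.
        return checkpoint(self._forward, x)


x = torch.randn(4, 16, requires_grad=True)
TinyBlock()(x).sum().backward()

Run as-is, the backward pass re-executes _forward once to rebuild the activations it needs before computing gradients.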