as per wfjsw's suggestion, revert changes for sd_hijack_checkpoint.py

AUTOMATIC1111 2024-06-08 10:54:41 +03:00
parent ad229fae43
commit 603509ec90

modules/sd_hijack_checkpoint.py

@@ -4,19 +4,16 @@ import ldm.modules.attention
 import ldm.modules.diffusionmodules.openaimodel
 
 
-# Setting flag=False so that torch skips checking parameters.
-# parameters checking is expensive in frequent operations.
-
 def BasicTransformerBlock_forward(self, x, context=None):
-    return checkpoint(self._forward, x, context, flag=False)
+    return checkpoint(self._forward, x, context)
 
 
 def AttentionBlock_forward(self, x):
-    return checkpoint(self._forward, x, flag=False)
+    return checkpoint(self._forward, x)
 
 
 def ResBlock_forward(self, x, emb):
-    return checkpoint(self._forward, x, emb, flag=False)
+    return checkpoint(self._forward, x, emb)
 
 
 stored = []
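
For context, the restored wrappers defer to checkpoint imported from torch.utils.checkpoint, which discards intermediate activations during the forward pass and recomputes them on backward, trading extra compute for lower memory use during training. Below is a minimal sketch of that pattern; TinyBlock is a hypothetical stand-in for ldm's BasicTransformerBlock, and use_reentrant=False is an assumption for recent PyTorch versions, not an argument the webui file itself passes.

# Minimal sketch of the checkpointing pattern the restored lines use.
# TinyBlock is a hypothetical stand-in for ldm's BasicTransformerBlock;
# use_reentrant=False is an assumption for recent PyTorch and is not
# passed in modules/sd_hijack_checkpoint.py itself.
import torch
from torch.utils.checkpoint import checkpoint


class TinyBlock(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.proj = torch.nn.Linear(8, 8)

    def _forward(self, x):
        # Activations of this call are discarded after the forward pass
        # and recomputed during backward, trading compute for memory.
        return torch.relu(self.proj(x))

    def forward(self, x):
        # Mirrors BasicTransformerBlock_forward in the diff: defer to the
        # checkpointed inner _forward instead of calling it directly.
        return checkpoint(self._forward, x, use_reentrant=False)


block = TinyBlock()
out = block(torch.randn(4, 8, requires_grad=True))
out.sum().backward()  # gradients flow through the recomputed _forward

The reverted change had passed flag=False to each call; dropping that argument, as this commit does, returns the three wrappers to plain checkpointed calls.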