Commit 603509ec authored by AUTOMATIC1111

as per wfjsw's suggestion, revert changes for sd_hijack_checkpoint.py

parent ad229fae
@@ -4,19 +4,16 @@ import ldm.modules.attention
 import ldm.modules.diffusionmodules.openaimodel

-# Setting flag=False so that torch skips checking parameters.
-# parameters checking is expensive in frequent operations.
 def BasicTransformerBlock_forward(self, x, context=None):
-    return checkpoint(self._forward, x, context, flag=False)
+    return checkpoint(self._forward, x, context)

 def AttentionBlock_forward(self, x):
-    return checkpoint(self._forward, x, flag=False)
+    return checkpoint(self._forward, x)

 def ResBlock_forward(self, x, emb):
-    return checkpoint(self._forward, x, emb, flag=False)
+    return checkpoint(self._forward, x, emb)

 stored = []
...
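For context on the pattern being reverted to: each wrapper routes a block's _forward through activation checkpointing, so intermediate activations are recomputed during the backward pass instead of being kept in memory. Below is a minimal, self-contained sketch of that pattern, assuming checkpoint here is torch.utils.checkpoint.checkpoint; the Block class, layer sizes, and tensor shapes are invented for illustration and are not part of the repository.

# Minimal sketch of the checkpointing pattern in the diff above, assuming
# `checkpoint` is torch.utils.checkpoint.checkpoint. `Block` and the shapes
# below are illustrative only and do not appear in the repository.
import torch
from torch.utils.checkpoint import checkpoint


class Block(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.net = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.GELU())

    def _forward(self, x):
        return self.net(x)

    def forward(self, x):
        # Same shape as BasicTransformerBlock_forward in the diff: delegate to
        # _forward through checkpoint(), so the activations inside _forward are
        # recomputed during backward instead of being stored.
        return checkpoint(self._forward, x, use_reentrant=False)  # use_reentrant requires PyTorch >= 1.11


x = torch.randn(4, 8, requires_grad=True)
loss = Block()(x).sum()
loss.backward()        # _forward runs again here to rebuild activations
print(x.grad.shape)    # torch.Size([4, 8])

The stored = [] list that follows in the file is presumably used to remember the original forward methods so the monkey-patched checkpointing wrappers can be installed and later removed, trading extra compute for lower memory only while it is wanted.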