Commit 2043c4a2 authored by C43H66N12O12S2, committed by AUTOMATIC1111

delete xformers attnblock

parent 786ed499
@@ -292,15 +292,3 @@ def cross_attention_attnblock_forward(self, x):
     return h3


-def xformers_attnblock_forward(self, x):
-    try:
-        h_ = x
-        h_ = self.norm(h_)
-        q1 = self.q(h_).contiguous()
-        k1 = self.k(h_).contiguous()
-        v = self.v(h_).contiguous()
-        out = xformers.ops.memory_efficient_attention(q1, k1, v)
-        out = self.proj_out(out)
-        return x + out
-    except NotImplementedError:
-        return cross_attention_attnblock_forward(self, x)
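For context, the deleted function passed the AttnBlock's q/k/v tensors to xformers.ops.memory_efficient_attention directly, even though the block's q, k, v, and proj_out layers are 1x1 convolutions that produce (B, C, H, W) tensors, while memory_efficient_attention operates on (batch, sequence, channels) inputs. The sketch below is a hypothetical illustration (not part of this commit) of how such a forward pass could flatten and restore the spatial dimensions around the attention call; it assumes einops is available and that the fallback cross_attention_attnblock_forward is in scope, as in the deleted code.

```python
# Hypothetical sketch, not the project's implementation: reshape the
# conv-derived (B, C, H, W) tensors into the (B, seq, channels) layout
# that xformers.ops.memory_efficient_attention expects, then restore
# the spatial layout before the 1x1 output projection.
import xformers.ops
from einops import rearrange


def xformers_attnblock_forward(self, x):
    try:
        h_ = self.norm(x)
        q = self.q(h_)
        k = self.k(h_)
        v = self.v(h_)
        b, c, h, w = q.shape
        # flatten spatial dims: (B, C, H, W) -> (B, H*W, C)
        q, k, v = (rearrange(t, 'b c h w -> b (h w) c').contiguous() for t in (q, k, v))
        out = xformers.ops.memory_efficient_attention(q, k, v)
        # restore spatial layout before the output projection
        out = rearrange(out, 'b (h w) c -> b c h w', h=h)
        return x + self.proj_out(out)
    except NotImplementedError:
        # fall back to the split-attention implementation
        return cross_attention_attnblock_forward(self, x)
```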