Commit 84823275 authored by C43H66N12O12S2, committed by AUTOMATIC1111

readd xformers attnblock

parent 2043c4a2
@@ -292,3 +292,18 @@ def cross_attention_attnblock_forward(self, x):
    return h3

def xformers_attnblock_forward(self, x):
    try:
        h_ = x
        h_ = self.norm(h_)
        q = self.q(h_)
        k = self.k(h_)
        v = self.v(h_)
        b, c, h, w = q.shape
        q, k, v = map(lambda t: rearrange(t, 'b c h w -> b (h w) c'), (q, k, v))
        out = xformers.ops.memory_efficient_attention(q, k, v)
        out = rearrange(out, 'b (h w) c -> b c h w', h=h)
        out = self.proj_out(out)
        return x + out
    except NotImplementedError:
        return cross_attention_attnblock_forward(self, x)
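
The function treats each pixel of the (b, c, h, w) feature map as one attention token. Below is a minimal sketch of that round-trip on dummy tensors, assuming a CUDA device and an xformers build with a kernel for this shape/dtype; the .contiguous() calls are a precaution this commit omits, since some xformers builds reject strided inputs:

    import torch
    from einops import rearrange
    import xformers.ops

    # Dummy VAE-style feature map: batch 2, 64 channels, 16x16 spatial grid.
    b, c, h, w = 2, 64, 16, 16
    q = torch.randn(b, c, h, w, device='cuda', dtype=torch.float16)
    k = torch.randn_like(q)
    v = torch.randn_like(q)

    # Flatten the spatial grid into a token axis: (b, c, h, w) -> (b, h*w, c),
    # so each of the h*w pixels becomes one token of dimension c.
    q, k, v = (rearrange(t, 'b c h w -> b (h w) c').contiguous() for t in (q, k, v))

    # Memory-efficient attention over h*w tokens; raises NotImplementedError
    # when no kernel covers the given shape/dtype/device combination, which
    # is exactly what the except clause in the commit catches.
    out = xformers.ops.memory_efficient_attention(q, k, v)

    # Restore the spatial layout so proj_out (a 1x1 conv) can consume it.
    out = rearrange(out, 'b (h w) c -> b c h w', h=h)
    assert out.shape == (b, c, h, w)

The try/except gives a graceful degradation path: when xformers has no kernel for the inputs, the block falls back to the pure-PyTorch cross_attention_attnblock_forward defined just above in the same file.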
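
How this forward gets attached to the model is outside this diff; in the webui it happens during the sd_hijack step by patching the class, so every instance picks it up. A hedged sketch of that wiring, with the AttnBlock path taken from the CompVis ldm package (treat it as illustrative, not a quote of the repo):

    import ldm.modules.diffusionmodules.model
    from modules import sd_hijack_optimizations

    # Patch the VAE attention block's forward at the class level; all
    # AttnBlock instances then route through the xformers version.
    ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.xformers_attnblock_forward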