Commit 7ff1170a authored by AUTOMATIC

emergency fix for xformers (continue + shared)

parent 48feae37
 import math
 import torch
 from torch import einsum
-try:
-    import xformers.ops
-    import functorch
-    xformers._is_functorch_available = True
-    shared.xformers_available = True
-except:
-    print('Cannot find xformers, defaulting to split attention. Try setting --xformers in your webui-user file if you wish to install it.')
-    continue
 from ldm.util import default
 from einops import rearrange
 from modules import shared
+try:
+    import xformers.ops
+    import functorch
+    xformers._is_functorch_available = True
+    shared.xformers_available = True
+except Exception:
+    print('Cannot find xformers, defaulting to split attention. Try adding --xformers commandline argument to your webui-user file if you wish to install it.')
 # see https://github.com/basujindal/stable-diffusion/pull/117 for discussion
 def split_cross_attention_forward_v1(self, x, context=None, mask=None):
......
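The removed hunk had two errors, which the commit message abbreviates as "continue + shared": `continue` is only legal inside a loop, so at module level it raises SyntaxError, and `shared` was referenced before `from modules import shared` had run, so even a successful xformers import would raise NameError. The fixed hunk moves the try/except below the `shared` import, drops the `continue`, and narrows the bare `except:` to `except Exception:`. Below is a minimal, self-contained sketch of that guarded optional-import pattern; the `SimpleNamespace` stand-in for `modules.shared` is an assumption for illustration only, and the functorch flag from the diff is omitted.

import types

# Stand-in for modules.shared, assumed for this sketch only; in the real
# codebase the flag lives on the modules.shared module.
shared = types.SimpleNamespace(xformers_available=False)

try:
    import xformers.ops  # optional dependency; absent on most default installs
    shared.xformers_available = True
except Exception:
    # `except Exception:` (unlike a bare `except:`) lets KeyboardInterrupt and
    # SystemExit propagate. There is no `continue` here: simply falling
    # through after the print is the correct module-level behaviour.
    print('Cannot find xformers, defaulting to split attention.')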