Retry with VAE tiled decoding if the regular decode fails because of an OOM error.

This commit is contained in:
comfyanonymous
2023-03-22 14:49:00 -04:00
parent aae9fe0cf9
commit 3ed4a4e4e6
5 changed files with 28 additions and 28 deletions

View File

@@ -20,11 +20,6 @@ if model_management.xformers_enabled():
import os
_ATTN_PRECISION = os.environ.get("ATTN_PRECISION", "fp32")
try:
OOM_EXCEPTION = torch.cuda.OutOfMemoryError
except:
OOM_EXCEPTION = Exception
def exists(val):
return val is not None
@@ -312,7 +307,7 @@ class CrossAttentionDoggettx(nn.Module):
r1[:, i:end] = einsum('b i j, b j d -> b i d', s2, v)
del s2
break
except OOM_EXCEPTION as e:
except model_management.OOM_EXCEPTION as e:
if first_op_done == False:
torch.cuda.empty_cache()
torch.cuda.ipc_collect()

View File

@@ -13,11 +13,6 @@ if model_management.xformers_enabled():
import xformers
import xformers.ops
try:
OOM_EXCEPTION = torch.cuda.OutOfMemoryError
except:
OOM_EXCEPTION = Exception
def get_timestep_embedding(timesteps, embedding_dim):
"""
This matches the implementation in Denoising Diffusion Probabilistic Models:
@@ -221,7 +216,7 @@ class AttnBlock(nn.Module):
r1[:, :, i:end] = torch.bmm(v, s2)
del s2
break
except OOM_EXCEPTION as e:
except model_management.OOM_EXCEPTION as e:
steps *= 2
if steps > 128:
raise e

View File

@@ -24,10 +24,7 @@ except ImportError:
from torch import Tensor
from typing import List
try:
OOM_EXCEPTION = torch.cuda.OutOfMemoryError
except:
OOM_EXCEPTION = Exception
import model_management
def dynamic_slice(
x: Tensor,
@@ -161,7 +158,7 @@ def _get_attention_scores_no_kv_chunking(
try:
attn_probs = attn_scores.softmax(dim=-1)
del attn_scores
except OOM_EXCEPTION:
except model_management.OOM_EXCEPTION:
print("ran out of memory while running softmax in _get_attention_scores_no_kv_chunking, trying slower in place softmax instead")
attn_scores -= attn_scores.max(dim=-1, keepdim=True).values
torch.exp(attn_scores, out=attn_scores)