From f66fb3fa41ca8230ff8e61c976a3d60a33ad74f9 Mon Sep 17 00:00:00 2001 From: Logan Hallee <72926928+lhallee@users.noreply.github.com> Date: Tue, 1 Oct 2024 12:54:43 -0700 Subject: [PATCH] Documentation update for FMHA __init__.py --- xformers/ops/fmha/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xformers/ops/fmha/__init__.py b/xformers/ops/fmha/__init__.py index ff07f3bd00..b9d164bf4c 100644 --- a/xformers/ops/fmha/__init__.py +++ b/xformers/ops/fmha/__init__.py @@ -227,7 +227,7 @@ def memory_efficient_attention( attn = attn.softmax(-1) attn = F.dropout(attn, p) attn = attn @ value - return attn.transpose(1, 2) + return attn.transpose(1, 2).contiguous() :Examples: