From d2eeac6d7da6b9c084351ed4456b39ddc5cbc426 Mon Sep 17 00:00:00 2001
From: Michael Melesse
Date: Mon, 13 May 2024 14:36:34 -0400
Subject: [PATCH] skip backward (#586)

---
 python/perf-kernels/flash-attention.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/python/perf-kernels/flash-attention.py b/python/perf-kernels/flash-attention.py
index 6fc861b281fa..d70a43ecd36c 100644
--- a/python/perf-kernels/flash-attention.py
+++ b/python/perf-kernels/flash-attention.py
@@ -1277,6 +1277,7 @@ def test_op_varlen_mqa_fwd(Z, HQ, HK, N_CTX, D_HEAD, causal, dtype=torch.float16
 @pytest.mark.parametrize('use_alibi', [False, True])
 def test_op_bwd(Z, H, N_CTX, D_HEAD, qseqlen_not_equal_kseqlen, causal, torch_sdpa_test, use_alibi,
                 dtype=torch.float16):
+    pytest.skip()
     torch.manual_seed(20)
     if qseqlen_not_equal_kseqlen is not None:
         seqlen_q = qseqlen_not_equal_kseqlen
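
Note for reviewers: as a minimal self-contained sketch of what an unconditional
pytest.skip() at the top of a test body does (the test name below is
hypothetical, not from this patch):

    import pytest

    def test_backward_placeholder():
        # pytest.skip() raises pytest's internal Skipped exception,
        # so nothing after this line executes and the test is
        # reported as SKIPPED rather than passed or failed.
        pytest.skip("backward kernel temporarily disabled")
        assert False  # never reached

The reason string is optional (the patch itself calls pytest.skip() with no
argument) but makes the skip visible in the short test summary, e.g. under
pytest -rs. An alternative with the same effect is the
@pytest.mark.skip(reason=...) decorator, which marks the test as skipped at
collection time without entering the test body at all.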