Skip to content

Commit

Permalink
Add skip for MHA test on ROCm EP
Browse files Browse the repository at this point in the history
  • Loading branch information
groenenboomj committed Aug 9, 2023
1 parent 6c56542 commit 07c92c2
Showing 1 changed file with 5 additions and 0 deletions.
5 changes: 5 additions & 0 deletions onnxruntime/test/contrib_ops/multihead_attention_op_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -461,6 +461,7 @@ static void RunMultiHeadAttentionTests(AttentionTestData& data, bool disable_cpu
// Test fused cross attention kernel
// It requires head_size > 32 and head_size <= 64 for T4 GPU; hidden_size == v_hidden_size.
TEST(MultiHeadAttentionTest, CrossAttention_Batch2_HeadSize40) {
ROCM_GTEST_SKIP("ROCm MHA skip");
AttentionTestData data;
GetCrossAttentionData_HeadSize40(data);
RunMultiHeadAttentionTests(data);
Expand Down Expand Up @@ -490,6 +491,7 @@ TEST(MultiHeadAttentionTest, CrossAttention_Batch2_HeadSize32_RightSidePadding_M
}

TEST(MultiHeadAttentionTest, CrossAttention_Batch1_HeadSize32_LeftSidePadding_Mask2D) {
ROCM_GTEST_SKIP("ROCm MHA skip");
AttentionTestData data;
GetCrossAttentionData_Batch1_HeadSize32_LeftSidePadding(data);
RunMultiHeadAttentionTests(data, true);
Expand All @@ -515,6 +517,7 @@ TEST(MultiHeadAttentionTest, SelfAttention_Batch2_HeadSize32_NoBias_NoMask_Packe

// This tests qk_head_size != v_head_size
TEST(MultiHeadAttentionTest, CrossAttention_Batch2_HeadSize16_8) {
ROCM_GTEST_SKIP("ROCm MHA skip");
AttentionTestData data;
GetCrossAttentionData_HeadSize16_8(data);
RunMultiHeadAttentionTests(data);
Expand All @@ -524,6 +527,7 @@ TEST(MultiHeadAttentionTest, CrossAttention_Batch2_HeadSize16_8) {
}

TEST(MultiHeadAttentionTest, CrossAttention_Batch1_HeadSize16) {
ROCM_GTEST_SKIP("ROCm MHA skip");
AttentionTestData data;
GetCrossAttentionData_HeadSize16(data);
RunMultiHeadAttentionTests(data);
Expand Down Expand Up @@ -555,6 +559,7 @@ TEST(MultiHeadAttentionTest, AttentionCutlassRelPosBias) {

TEST(MultiHeadAttentionTest, CrossAttention_DiffSequenceLengths) {
// Whisper decoder cross attention without mask and different sequence lengths for Q and K/V
ROCM_GTEST_SKIP("ROCm MHA skip");
AttentionTestData data;
GetCrossAttentionData_DiffSequenceLengths(data);
RunMultiHeadAttentionTests(data);
Expand Down

0 comments on commit 07c92c2

Please sign in to comment.