From 8ce974032f7d1eeafdd4bbe94432e0f899f82214 Mon Sep 17 00:00:00 2001
From: Leeran <1311781983@qq.com>
Date: Wed, 14 Aug 2024 06:09:16 +0000
Subject: [PATCH 1/2] add test01.

Signed-off-by: Leeran <1311781983@qq.com>
---
 test01 | 60 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 60 insertions(+)
 create mode 100644 test01

diff --git a/test01 b/test01
new file mode 100644
index 0000000..95c95b6
--- /dev/null
+++ b/test01
@@ -0,0 +1,60 @@
+src/transformer/flash_attention_score_grad/ophost/flash_attention_score_grad.h
+src/transformer/flash_attention_score_grad/ophost/flash_attention_score_grad_proto.cpp
+src/transformer/flash_attention_score_grad/ophost/flash_attention_score_grad_def.cpp
+src/utils/inc/fallback_comm.h
+src/utils/inc/fallback.h
+src/utils/src/fallback_comm.cpp
+src/transformer/ffn/ophost/ffn_proto.cpp
+src/transformer/ffn/ophost/aclnn_ffn.h
+src/transformer/ffn/ophost/aclnn_ffn_v2.h
+src/transformer/ffn/ophost/aclnn_ffn_v3.h
+src/transformer/ffn/ophost/aclnn_ffn.cpp
+src/transformer/ffn/ophost/ffn.h
+src/transformer/ffn/ophost/ffn.cpp
+src/transformer/ffn/ophost/fallback_ffn.cpp
+src/transformer/ffn/ophost/ffn_def.cpp
+src/transformer/ffn/ffn_nonquant_nz.h
+src/transformer/incre_flash_attention/ifa_public_define.h
+src/transformer/incre_flash_attention/incre_flash_attention.cpp
+src/transformer/incre_flash_attention/incre_flash_attention_allvec_new.h
+src/transformer/incre_flash_attention/incre_flash_attention_split_Bbn2s2_Us2.h
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention.cpp
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention.h
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v2.cpp
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v3.cpp
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v3.h
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v4.cpp
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v4.h
+src/transformer/incre_flash_attention/ophost/fallback_incre_flash_attention.cpp
+src/transformer/incre_flash_attention/ophost/incre_flash_attention_def.cpp
+src/transformer/incre_flash_attention/ophost/incre_flash_attention_tiling.cc
+src/transformer/incre_flash_attention/ophost/incre_flash_attention_tiling.h
+src/transformer/incre_flash_attention/ophost/incre_flash_attention_tiling_register.cc
+src/transformer/prompt_flash_attention/kernel_data_copy_transpose.h
+src/transformer/prompt_flash_attention/kernel_operator_softmax_compute_nz.h
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention.cpp
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention.h
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_inner.cpp
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_inner.h
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_v2.cpp
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_v2.h
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_v3.cpp
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_v3.h
+src/transformer/prompt_flash_attention/ophost/fallback_prompt_flash_attention.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_def.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention.h
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_proto.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_base_aclnn.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_tiling.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_tiling.h
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_tiling_register.cc
+src/transformer/prompt_flash_attention/prompt_flash_attention.cpp
+src/transformer/prompt_flash_attention/prompt_flash_attention_base.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_no_tail.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_no_tailWBNSD.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_no_tailWBNSD_KV_NZ.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_tail.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_tailWBNSD.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_empty_tensor.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_nz_kv_base.h
+1
\ No newline at end of file
--
Gitee

From c0a60c2c799e06dfcc0203e62c926ed90a165a0e Mon Sep 17 00:00:00 2001
From: Leeran <1311781983@qq.com>
Date: Wed, 14 Aug 2024 06:14:28 +0000
Subject: [PATCH 2/2] add tests/test.

Signed-off-by: Leeran <1311781983@qq.com>
---
 tests/test | 42 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 42 insertions(+)
 create mode 100644 tests/test

diff --git a/tests/test b/tests/test
new file mode 100644
index 0000000..fc13291
--- /dev/null
+++ b/tests/test
@@ -0,0 +1,42 @@
+# Copyright (c) 2024 Huawei Technologies Co., Ltd.
+# This file is a part of the CANN Open Software.
+# Licensed under CANN Open Software License Agreement Version 1.0 (the "License").
+# Please refer to the License for details. You may not use this file except in compliance with the License.
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED,
+# INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE.
+# See LICENSE in the root of the software repository for the full text of the License.
+# ======================================================================================================================
+
+add_executable(test_incre_flash_attention
+    test_incre_flash_attention.cpp
+)
+add_execute_example(
+    TARGET_NAME test_incre_flash_attention
+    SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/run_ifa_case.sh
+    TEST_CASE ifa_case
+    ACLNN_FUNC "aclnnIncreFlashAttention"
+)
+
+add_executable(test_incre_flash_attention_v2
+    test_incre_flash_attention_v2.cpp
+)
+add_execute_example(
+    TARGET_NAME test_incre_flash_attention_v2
+    SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/run_ifa_case.sh
+    TEST_CASE ifa_case_v2
+    ACLNN_FUNC "aclnnIncreFlashAttentionV2"
+)
+
+add_executable(test_incre_flash_attention_v3
+    test_incre_flash_attention_v3.cpp
+)
+add_execute_example(
+    TARGET_NAME test_incre_flash_attention_v3
+    SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/run_ifa_case.sh
+    TEST_CASE ifa_case_v3
+    ACLNN_FUNC "aclnnIncreFlashAttentionV3"
+)
+
+add_executable(test_incre_flash_attention_v4
+    test_incre_flash_attention_v4.cpp
+)
\ No newline at end of file
--
Gitee