diff --git a/test01 b/test01
new file mode 100644
index 0000000000000000000000000000000000000000..95c95b6e8a5185dd41bd426e574bb6cc7699aee6
--- /dev/null
+++ b/test01
@@ -0,0 +1,60 @@
+src/transformer/flash_attention_score_grad/ophost/flash_attention_score_grad.h
+src/transformer/flash_attention_score_grad/ophost/flash_attention_score_grad_proto.cpp
+src/transformer/flash_attention_score_grad/ophost/flash_attention_score_grad_def.cpp
+src/utils/inc/fallback_comm.h
+src/utils/inc/fallback.h
+src/utils/src/fallback_comm.cpp
+src/transformer/ffn/ophost/ffn_proto.cpp
+src/transformer/ffn/ophost/aclnn_ffn.h
+src/transformer/ffn/ophost/aclnn_ffn_v2.h
+src/transformer/ffn/ophost/aclnn_ffn_v3.h
+src/transformer/ffn/ophost/aclnn_ffn.cpp
+src/transformer/ffn/ophost/ffn.h
+src/transformer/ffn/ophost/ffn.cpp
+src/transformer/ffn/ophost/fallback_ffn.cpp
+src/transformer/ffn/ophost/ffn_def.cpp
+src/transformer/ffn/ffn_nonquant_nz.h
+src/transformer/incre_flash_attention/ifa_public_define.h
+src/transformer/incre_flash_attention/incre_flash_attention.cpp
+src/transformer/incre_flash_attention/incre_flash_attention_allvec_new.h
+src/transformer/incre_flash_attention/incre_flash_attention_split_Bbn2s2_Us2.h
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention.cpp
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention.h
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v2.cpp
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v3.cpp
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v3.h
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v4.cpp
+src/transformer/incre_flash_attention/ophost/aclnn_incre_flash_attention_v4.h
+src/transformer/incre_flash_attention/ophost/fallback_incre_flash_attention.cpp
+src/transformer/incre_flash_attention/ophost/incre_flash_attention_def.cpp
+src/transformer/incre_flash_attention/ophost/incre_flash_attention_tiling.cc
+src/transformer/incre_flash_attention/ophost/incre_flash_attention_tiling.h
+src/transformer/incre_flash_attention/ophost/incre_flash_attention_tiling_register.cc
+src/transformer/prompt_flash_attention/kernel_data_copy_transpose.h
+src/transformer/prompt_flash_attention/kernel_operator_softmax_compute_nz.h
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention.cpp
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention.h
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_inner.cpp
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_inner.h
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_v2.cpp
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_v2.h
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_v3.cpp
+src/transformer/prompt_flash_attention/ophost/aclnn_prompt_flash_attention_v3.h
+src/transformer/prompt_flash_attention/ophost/fallback_prompt_flash_attention.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_def.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention.h
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_proto.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_base_aclnn.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_tiling.cpp
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_tiling.h
+src/transformer/prompt_flash_attention/ophost/prompt_flash_attention_tiling_register.cc
+src/transformer/prompt_flash_attention/prompt_flash_attention.cpp
+src/transformer/prompt_flash_attention/prompt_flash_attention_base.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_no_tail.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_no_tailWBNSD.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_no_tailWBNSD_KV_NZ.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_tail.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_bnstilling_n_s_tailWBNSD.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_empty_tensor.h
+src/transformer/prompt_flash_attention/prompt_flash_attention_nz_kv_base.h
+1
\ No newline at end of file
diff --git a/tests/test b/tests/test
new file mode 100644
index 0000000000000000000000000000000000000000..fc13291a2b95199a0e8140fa823d0bd6554fcdb1
--- /dev/null
+++ b/tests/test
@@ -0,0 +1,42 @@
+# Copyright (c) 2024 Huawei Technologies Co., Ltd.
+# This file is a part of the CANN Open Software.
+# Licensed under CANN Open Software License Agreement Version 1.0 (the "License").
+# Please refer to the License for details. You may not use this file except in compliance with the License.
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED,
+# INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE.
+# See LICENSE in the root of the software repository for the full text of the License.
+# ======================================================================================================================
+
+add_executable(test_incre_flash_attention
+    test_incre_flash_attention.cpp
+)
+add_execute_example(
+    TARGET_NAME test_incre_flash_attention
+    SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/run_ifa_case.sh
+    TEST_CASE ifa_case
+    ACLNN_FUNC "aclnnIncreFlashAttention"
+)
+
+add_executable(test_incre_flash_attention_v2
+    test_incre_flash_attention_v2.cpp
+)
+add_execute_example(
+    TARGET_NAME test_incre_flash_attention_v2
+    SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/run_ifa_case.sh
+    TEST_CASE ifa_case_v2
+    ACLNN_FUNC "aclnnIncreFlashAttentionV2"
+)
+
+add_executable(test_incre_flash_attention_v3
+    test_incre_flash_attention_v3.cpp
+)
+add_execute_example(
+    TARGET_NAME test_incre_flash_attention_v3
+    SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/run_ifa_case.sh
+    TEST_CASE ifa_case_v3
+    ACLNN_FUNC "aclnnIncreFlashAttentionV3"
+)
+
+add_executable(test_incre_flash_attention_v4
+    test_incre_flash_attention_v4.cpp
+)
\ No newline at end of file
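
A note on the CMake hunk above: add_executable() is standard CMake, but add_execute_example() is a project-local helper defined elsewhere in the repository's build scripts, and its implementation is not part of this diff. As a hedged sketch only, a minimal definition consistent with the call sites might look like the following; the keyword arguments (TARGET_NAME, SCRIPT, TEST_CASE, ACLNN_FUNC) are taken from the calls above, while the body, including registration through CTest's add_test(), is an assumption rather than the repository's actual definition.

# Hypothetical sketch of add_execute_example(); the real helper ships with the
# repository's build scripts and may differ from this.
function(add_execute_example)
    set(oneValueArgs TARGET_NAME SCRIPT TEST_CASE ACLNN_FUNC)
    cmake_parse_arguments(ARG "" "${oneValueArgs}" "" ${ARGN})

    # Register a CTest case that runs the wrapper script, passing the built
    # test binary, the case name, and the aclnn entry point under test.
    add_test(
        NAME ${ARG_TEST_CASE}
        COMMAND bash ${ARG_SCRIPT}
                $<TARGET_FILE:${ARG_TARGET_NAME}>
                ${ARG_TEST_CASE}
                ${ARG_ACLNN_FUNC}
    )
endfunction()

Under this reading, each test_incre_flash_attention* binary is built once and then driven by run_ifa_case.sh with a per-version case name (ifa_case, ifa_case_v2, ...) and the matching aclnnIncreFlashAttention* entry point. Note that the file as committed ends after the v4 add_executable() without a corresponding add_execute_example() call and without a trailing newline.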