From fc8c1a59e8dac3e1a6ae08ce47b08cb016a6598a Mon Sep 17 00:00:00 2001
From: maoyong
Date: Tue, 27 Feb 2024 11:08:32 +0800
Subject: [PATCH 1/3] fix layernorm bug

Signed-off-by: maoyong
---
 .../neural_network_runtime/ops/layernorm_builder.cpp | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp b/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp
index 95240e8..d50124a 100644
--- a/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp
+++ b/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp
@@ -27,6 +27,7 @@ static const int OUTPUT_NUM = 1;
 static const int INPUT_X = 0;
 static const int INPUT_GAMMA = 1;
 static const int INPUT_BETA = 2;
+static const int NORMALIZE_SIZE_INIT = 1;
 static const std::string OP_NAME = "LayerNorm";
 
 LayerNormBuilder::LayerNormBuilder() {}
@@ -186,6 +187,16 @@ OH_NN_ReturnCode LayerNormBuilder::ValidateGammaAndBetaShape(const std::vector<
     auto betaShape = allTensors[inputsIndex[INPUT_BETA]]->GetDimensions();
     int inputShapeSize = static_cast<int>(inputShape.size());
 
+    if (gammaShape.size() != static_cast<size_t>(inputShapeSize - NORMALIZE_SIZE_INIT - beginAxis)) {
+        LOGE("[LayerNormBuilder] Invalid gamma dimension, gamma dimension should be equal to normalized dimension.");
+        return OH_NN_INVALID_PARAMETER;
+    }
+
+    if (betaShape.size() != static_cast<size_t>(inputShapeSize - NORMALIZE_SIZE_INIT - beginAxis)) {
+        LOGE("[LayerNormBuilder] Invalid beta dimension, beta dimension should be equal to normalized dimension.");
+        return OH_NN_INVALID_PARAMETER;
+    }
+
     for (auto i = beginAxis; i < inputShapeSize; i++) {
         if (gammaShape[i - beginAxis] != inputShape[i]) {
             LOGE("[LayerNormBuilder] Invalid gamma shape, gamma shape should equal to normalized shape.");
--
Gitee

From 6839d9b9e5ee614838e076a8384e11592e41a86b Mon Sep 17 00:00:00 2001
From: maoyong
Date: Tue, 27 Feb 2024 16:39:20 +0800
Subject: [PATCH 2/3] fix codecheck bug

Signed-off-by: maoyong
---
 .../native/neural_network_runtime/ops/layernorm_builder.cpp | 1 -
 1 file changed, 1 deletion(-)

diff --git a/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp b/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp
index d50124a..b634dfa 100644
--- a/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp
+++ b/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp
@@ -186,7 +186,6 @@ OH_NN_ReturnCode LayerNormBuilder::ValidateGammaAndBetaShape(const std::vector<
     auto gammaShape = allTensors[inputsIndex[INPUT_GAMMA]]->GetDimensions();
     auto betaShape = allTensors[inputsIndex[INPUT_BETA]]->GetDimensions();
     int inputShapeSize = static_cast<int>(inputShape.size());
-
     if (gammaShape.size() != static_cast<size_t>(inputShapeSize - NORMALIZE_SIZE_INIT - beginAxis)) {
         LOGE("[LayerNormBuilder] Invalid gamma dimension, gamma dimension should be equal to normalized dimension.");
         return OH_NN_INVALID_PARAMETER;
--
Gitee

From ab81ae06d6b08c14d0ab1291cb3ec4561557380e Mon Sep 17 00:00:00 2001
From: maoyong
Date: Tue, 27 Feb 2024 19:07:40 +0800
Subject: [PATCH 3/3] fix layernorm bug

Signed-off-by: maoyong
---
 .../native/neural_network_runtime/ops/layernorm_builder.cpp | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp b/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp
index b634dfa..0e9b2ef 100644
--- a/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp
+++ b/frameworks/native/neural_network_runtime/ops/layernorm_builder.cpp
@@ -27,7 +27,6 @@ static const int OUTPUT_NUM = 1;
 static const int INPUT_X = 0;
 static const int INPUT_GAMMA = 1;
 static const int INPUT_BETA = 2;
-static const int NORMALIZE_SIZE_INIT = 1;
 static const std::string OP_NAME = "LayerNorm";
 
 LayerNormBuilder::LayerNormBuilder() {}
@@ -186,12 +185,12 @@ OH_NN_ReturnCode LayerNormBuilder::ValidateGammaAndBetaShape(const std::vector<
     auto gammaShape = allTensors[inputsIndex[INPUT_GAMMA]]->GetDimensions();
     auto betaShape = allTensors[inputsIndex[INPUT_BETA]]->GetDimensions();
     int inputShapeSize = static_cast<int>(inputShape.size());
-    if (gammaShape.size() != static_cast<size_t>(inputShapeSize - NORMALIZE_SIZE_INIT - beginAxis)) {
+    if (gammaShape.size() != static_cast<size_t>(inputShapeSize - beginAxis)) {
         LOGE("[LayerNormBuilder] Invalid gamma dimension, gamma dimension should be equal to normalized dimension.");
         return OH_NN_INVALID_PARAMETER;
     }
 
-    if (betaShape.size() != static_cast<size_t>(inputShapeSize - NORMALIZE_SIZE_INIT - beginAxis)) {
+    if (betaShape.size() != static_cast<size_t>(inputShapeSize - beginAxis)) {
         LOGE("[LayerNormBuilder] Invalid beta dimension, beta dimension should be equal to normalized dimension.");
         return OH_NN_INVALID_PARAMETER;
     }
--
Gitee
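A note for reviewers, not part of the patches above: the rule that PATCH 3/3 ends up enforcing is that gamma and beta must describe exactly the normalized dimensions, i.e. have rank inputShapeSize - beginAxis and match the input shape element-wise from beginAxis onward; the NORMALIZE_SIZE_INIT subtraction introduced in PATCH 1/3 was off by one rank and would have rejected correctly shaped tensors. The sketch below is a minimal standalone illustration of the corrected check under assumed types: plain int32_t shape vectors and a hypothetical MatchesNormalizedShape helper, not the NNTensor/GetDimensions API used in layernorm_builder.cpp.

// Standalone sketch of the shape rule enforced after PATCH 3/3:
// gamma/beta must cover exactly the normalized axes [beginAxis, N).
#include <cstdint>
#include <iostream>
#include <vector>

// Returns true when scaleShape (gamma or beta) matches the normalized part of
// inputShape: rank equals N - beginAxis and each dimension matches element-wise.
static bool MatchesNormalizedShape(const std::vector<int32_t>& inputShape,
                                   const std::vector<int32_t>& scaleShape,
                                   int beginAxis)
{
    int inputShapeSize = static_cast<int>(inputShape.size());
    if (beginAxis < 0 || beginAxis >= inputShapeSize) {
        return false;
    }
    // Rank check, mirroring the corrected condition in PATCH 3/3.
    if (scaleShape.size() != static_cast<size_t>(inputShapeSize - beginAxis)) {
        return false;
    }
    // Element-wise check over the normalized axes, mirroring the existing for loop.
    for (int i = beginAxis; i < inputShapeSize; ++i) {
        if (scaleShape[i - beginAxis] != inputShape[i]) {
            return false;
        }
    }
    return true;
}

int main()
{
    const std::vector<int32_t> input = {2, 3, 4};
    std::cout << MatchesNormalizedShape(input, {3, 4}, 1) << "\n";  // 1: rank 2 matches axes 1..2
    std::cout << MatchesNormalizedShape(input, {4}, 1) << "\n";     // 0: rank 1, rejected
    std::cout << MatchesNormalizedShape(input, {4}, 2) << "\n";     // 1: normalizing only the last axis
    return 0;
}

With the PATCH 1/3 condition, the first call above (input rank 3, beginAxis 1, gamma rank 2) would have been rejected, since 3 - 1 - 1 = 1 != 2; dropping NORMALIZE_SIZE_INIT makes the rank check consistent with the element-wise loop that follows it.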