diff --git a/build_helper.py b/build_helper.py
index d76fc4b62a4aef380c60366b10caa139efce528a..ed0566c51841b7a28a7d507fedac74b8e2eb6951 100755
--- a/build_helper.py
+++ b/build_helper.py
@@ -66,6 +66,7 @@ def do_patch(patch_dir, target_dir):
         '0030-generate-flatbuffer-notice.patch',
         '0031-fix-matmul-assemble-can-not-protect-stack-in-mutil-thread.patch',
         '0032-fix-for-concat-bool-type.patch',
+        '0033-fix-nullptr-problems.patch',
     ]
 
     cwd = os.getcwd()
diff --git a/patches/0033-fix-nullptr-problems.patch b/patches/0033-fix-nullptr-problems.patch
new file mode 100644
index 0000000000000000000000000000000000000000..c6327f43e6d04cb3c060f73a49602f4815d0656e
--- /dev/null
+++ b/patches/0033-fix-nullptr-problems.patch
@@ -0,0 +1,387 @@
+From a61c426b59ab25925e3f083292d9a2510e804f93 Mon Sep 17 00:00:00 2001
+From: rabbit-fgh <2955722401@qq.com>
+Date: Wed, 21 Aug 2024 17:08:02 +0800
+Subject: [PATCH] fix nullptr
+
+---
+ mindspore/lite/mindir/src/mindir.cc                | 163 ++++++++++--------
+ mindspore/lite/mindir/src/mindir_tensor.cc         |   1 -
+ .../control_flow/kernel/identity_kernel.cc         |  11 +-
+ .../lite/src/litert/cxx_api/model/model.cc         |  12 +-
+ .../cast_gather_reduce_fusion_pass.cc              |   1 +
+ .../reduce_concat_fusion_pass.cc                   |   1 +
+ .../split_reduce_concat_fusion_pass.cc             |   1 +
+ mindspore/lite/src/litert/weight_decoder.cc        |   2 +
+ 8 files changed, 111 insertions(+), 81 deletions(-)
+
+diff --git a/mindspore/lite/mindir/src/mindir.cc b/mindspore/lite/mindir/src/mindir.cc
+index 39600e4d..93204fb8 100644
+--- a/mindspore/lite/mindir/src/mindir.cc
++++ b/mindspore/lite/mindir/src/mindir.cc
+@@ -37,13 +37,16 @@ PrimitivePtr MindIR_Activation_CreatePrimitive(ActivationType activation_type, f
+ ActivationType MindIR_Activation_GetActivationType(ConstPrimitivePtr primitive) {
+   if (primitive != nullptr) {
+     auto prim = static_cast<const schema::Primitive *>(primitive);
++    if (prim == nullptr) {
++      ActivationType en = static_cast<ActivationType>(0);
++      return en;
++    }
+     auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      return static_cast<ActivationType>(value->activation_type());
+-    } else {
++    if (value == nullptr) {
+       ActivationType en = static_cast<ActivationType>(0);
+       return en;
+     }
++    return static_cast<ActivationType>(value->activation_type());
+   } else {
+     ActivationType en = static_cast<ActivationType>(0);
+     return en;
+@@ -53,30 +56,34 @@ ActivationType MindIR_Activation_GetActivationType(ConstPrimitivePtr primitive)
+ void MindIR_Activation_SetActivationType(PrimitivePtr *primitive, ActivationType activation_type) {
+   if (primitive != nullptr && *primitive != nullptr) {
+     auto prim = static_cast<schema::Primitive *>(*primitive);
+-    auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      flatbuffers::FlatBufferBuilder fbb;
+-      auto ops_offset =
+-        schema::CreateActivation(fbb, static_cast<schema::ActivationType>(activation_type), value->alpha(),
+-                                 value->min_val(), value->max_val(), value->approximate());
+-      auto prim_offset =
+-        schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
+-      fbb.Finish(prim_offset);
+-      auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
+-      auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
+-      *primitive = ret_value;
++    if(prim != nullptr){
++      auto value = prim->value_as_Activation();
++      if (value != nullptr) {
++        flatbuffers::FlatBufferBuilder fbb;
++        auto ops_offset =
++          schema::CreateActivation(fbb, static_cast<schema::ActivationType>(activation_type), value->alpha(),
++                                   value->min_val(), value->max_val(), value->approximate());
++        auto prim_offset =
++          schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
++        fbb.Finish(prim_offset);
++        auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
++        auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
++        *primitive = ret_value;
++      }
+     }
+   }
+ }
+ float MindIR_Activation_GetAlpha(ConstPrimitivePtr primitive) {
+   if (primitive != nullptr) {
+     auto prim = static_cast<const schema::Primitive *>(primitive);
++    if(prim == nullptr){
++      return .0;
++    }
+     auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      return value->alpha();
+-    } else {
++    if(value == nullptr){
+       return .0;
+     }
++    return value->alpha();
+   } else {
+     return .0;
+   }
+@@ -85,29 +92,33 @@ float MindIR_Activation_GetAlpha(ConstPrimitivePtr primitive) {
+ void MindIR_Activation_SetAlpha(PrimitivePtr *primitive, float alpha) {
+   if (primitive != nullptr && *primitive != nullptr) {
+     auto prim = static_cast<schema::Primitive *>(*primitive);
+-    auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      flatbuffers::FlatBufferBuilder fbb;
+-      auto ops_offset = schema::CreateActivation(fbb, static_cast<schema::ActivationType>(value->activation_type()),
+-                                                 alpha, value->min_val(), value->max_val(), value->approximate());
+-      auto prim_offset =
+-        schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
+-      fbb.Finish(prim_offset);
+-      auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
+-      auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
+-      *primitive = ret_value;
++    if(prim != nullptr){
++      auto value = prim->value_as_Activation();
++      if (value != nullptr) {
++        flatbuffers::FlatBufferBuilder fbb;
++        auto ops_offset = schema::CreateActivation(fbb, static_cast<schema::ActivationType>(value->activation_type()),
++                                                   alpha, value->min_val(), value->max_val(), value->approximate());
++        auto prim_offset =
++          schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
++        fbb.Finish(prim_offset);
++        auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
++        auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
++        *primitive = ret_value;
++      }
+     }
+   }
+ }
+ float MindIR_Activation_GetMinVal(ConstPrimitivePtr primitive) {
+   if (primitive != nullptr) {
+     auto prim = static_cast<const schema::Primitive *>(primitive);
++    if(prim == nullptr){
++      return .0;
++    }
+     auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      return value->min_val();
+-    } else {
++    if(value == nullptr){
+       return .0;
+     }
++    return value->min_val();
+   } else {
+     return .0;
+   }
+@@ -116,29 +127,33 @@ float MindIR_Activation_GetMinVal(ConstPrimitivePtr primitive) {
+ void MindIR_Activation_SetMinVal(PrimitivePtr *primitive, float min_val) {
+   if (primitive != nullptr && *primitive != nullptr) {
+     auto prim = static_cast<schema::Primitive *>(*primitive);
+-    auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      flatbuffers::FlatBufferBuilder fbb;
+-      auto ops_offset = schema::CreateActivation(fbb, static_cast<schema::ActivationType>(value->activation_type()),
+-                                                 value->alpha(), min_val, value->max_val(), value->approximate());
+-      auto prim_offset =
+-        schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
+-      fbb.Finish(prim_offset);
+-      auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
+-      auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
+-      *primitive = ret_value;
++    if(prim != nullptr){
++      auto value = prim->value_as_Activation();
++      if (value != nullptr) {
++        flatbuffers::FlatBufferBuilder fbb;
++        auto ops_offset = schema::CreateActivation(fbb, static_cast<schema::ActivationType>(value->activation_type()),
++                                                   value->alpha(), min_val, value->max_val(), value->approximate());
++        auto prim_offset =
++          schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
++        fbb.Finish(prim_offset);
++        auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
++        auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
++        *primitive = ret_value;
++      }
+     }
+   }
+ }
+ float MindIR_Activation_GetMaxVal(ConstPrimitivePtr primitive) {
+   if (primitive != nullptr) {
+     auto prim = static_cast<const schema::Primitive *>(primitive);
++    if(prim == nullptr){
++      return .0;
++    }
+     auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      return value->max_val();
+-    } else {
++    if(value == nullptr){
+       return .0;
+     }
++    return value->max_val();
+   } else {
+     return .0;
+   }
+@@ -147,29 +162,33 @@ float MindIR_Activation_GetMaxVal(ConstPrimitivePtr primitive) {
+ void MindIR_Activation_SetMaxVal(PrimitivePtr *primitive, float max_val) {
+   if (primitive != nullptr && *primitive != nullptr) {
+     auto prim = static_cast<schema::Primitive *>(*primitive);
+-    auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      flatbuffers::FlatBufferBuilder fbb;
+-      auto ops_offset = schema::CreateActivation(fbb, static_cast<schema::ActivationType>(value->activation_type()),
+-                                                 value->alpha(), value->min_val(), max_val, value->approximate());
+-      auto prim_offset =
+-        schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
+-      fbb.Finish(prim_offset);
+-      auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
+-      auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
+-      *primitive = ret_value;
++    if(prim != nullptr){
++      auto value = prim->value_as_Activation();
++      if (value != nullptr) {
++        flatbuffers::FlatBufferBuilder fbb;
++        auto ops_offset = schema::CreateActivation(fbb, static_cast<schema::ActivationType>(value->activation_type()),
++                                                   value->alpha(), value->min_val(), max_val, value->approximate());
++        auto prim_offset =
++          schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
++        fbb.Finish(prim_offset);
++        auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
++        auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
++        *primitive = ret_value;
++      }
+     }
+   }
+ }
+ bool MindIR_Activation_GetApproximate(ConstPrimitivePtr primitive) {
+   if (primitive != nullptr) {
+     auto prim = static_cast<const schema::Primitive *>(primitive);
++    if(prim == nullptr){
++      return false;
++    }
+     auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      return value->approximate();
+-    } else {
++    if(value == nullptr){
+       return false;
+     }
++    return value->approximate();
+   } else {
+     return false;
+   }
+@@ -178,17 +197,19 @@ bool MindIR_Activation_GetApproximate(ConstPrimitivePtr primitive) {
+ void MindIR_Activation_SetApproximate(PrimitivePtr *primitive, bool approximate) {
+   if (primitive != nullptr && *primitive != nullptr) {
+     auto prim = static_cast<schema::Primitive *>(*primitive);
+-    auto value = prim->value_as_Activation();
+-    if (prim != nullptr && value != nullptr) {
+-      flatbuffers::FlatBufferBuilder fbb;
+-      auto ops_offset = schema::CreateActivation(fbb, static_cast<schema::ActivationType>(value->activation_type()),
+-                                                 value->alpha(), value->min_val(), value->max_val(), approximate);
+-      auto prim_offset =
+-        schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
+-      fbb.Finish(prim_offset);
+-      auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
+-      auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
+-      *primitive = ret_value;
++    if(prim != nullptr){
++      auto value = prim->value_as_Activation();
++      if (value != nullptr) {
++        flatbuffers::FlatBufferBuilder fbb;
++        auto ops_offset = schema::CreateActivation(fbb, static_cast<schema::ActivationType>(value->activation_type()),
++                                                   value->alpha(), value->min_val(), value->max_val(), approximate);
++        auto prim_offset =
++          schema::CreatePrimitive(fbb, static_cast<schema::PrimitiveType>(NODE_TYPE_ACTIVATION), ops_offset.o);
++        fbb.Finish(prim_offset);
++        auto new_addr = MindIRMemoryManager::GetInstance()->CreatePrimitiveFromBuilder(fbb, prim);
++        auto ret_value = flatbuffers::GetMutableRoot<schema::Primitive>(new_addr);
++        *primitive = ret_value;
++      }
+     }
+   }
+ }
+diff --git a/mindspore/lite/mindir/src/mindir_tensor.cc b/mindspore/lite/mindir/src/mindir_tensor.cc
+index 0e6a631e..80fe4392 100644
+--- a/mindspore/lite/mindir/src/mindir_tensor.cc
++++ b/mindspore/lite/mindir/src/mindir_tensor.cc
+@@ -360,7 +360,6 @@ void MindIR_Tensor_Destroy(TensorPtr *tensor) {
+     MindIRMemoryManager::GetInstance()->DeleteTensor(schema);
+     *tensor = nullptr;
+   }
+-  *tensor = nullptr;
+ }
+ }  // namespace lite
+ }  // namespace mindspore
+diff --git a/mindspore/lite/src/control_flow/kernel/identity_kernel.cc b/mindspore/lite/src/control_flow/kernel/identity_kernel.cc
+index a43a5caf..21ef3808 100644
+--- a/mindspore/lite/src/control_flow/kernel/identity_kernel.cc
++++ b/mindspore/lite/src/control_flow/kernel/identity_kernel.cc
+@@ -99,8 +99,13 @@ KernelExec *IdentityKernel::Create(std::vector<lite::Tensor *> in_tensors, std::
+   auto lite_kernel = new IdentityKernel(param, in_tensors, out_tensors, ctx);
+   MS_CHECK_TRUE_MSG(lite_kernel != nullptr, nullptr, "new inner kernel failed.");
+   std::shared_ptr<LiteKernel> shared_kernel(lite_kernel);
+-  auto *kernel_exec = new KernelExec(shared_kernel);
+-  kernel_exec->set_context(ctx);
+-  return kernel_exec;
++  if(shared_kernel != nullptr){
++    auto *kernel_exec = new KernelExec(shared_kernel);
++    kernel_exec->set_context(ctx);
++    return kernel_exec;
++  } else {
++    MS_LOG(ERROR) << "malloc shared_kernel failed.";
++    return nullptr;
++  }
+ }
+ }  // namespace mindspore::kernel
+diff --git a/mindspore/lite/src/litert/cxx_api/model/model.cc b/mindspore/lite/src/litert/cxx_api/model/model.cc
+index 2814da41..282c752e 100644
+--- a/mindspore/lite/src/litert/cxx_api/model/model.cc
++++ b/mindspore/lite/src/litert/cxx_api/model/model.cc
+@@ -297,6 +297,12 @@ Status Model::Build(const std::vector<char> &model_path, ModelType model_type,
+ 
+ Status Model::Build(GraphCell graph, const std::shared_ptr<Context> &model_context,
+                     const std::shared_ptr<TrainCfg> &train_cfg) {
++std::stringstream err_msg;
++if (model_context == nullptr) {
++  err_msg << "Invalid null context.";
++  MS_LOG(ERROR) << err_msg.str();
++  return Status(kLiteNullptr, err_msg.str());
++}
+ #ifdef ENABLE_HI_APP_EVENT
+   uint64_t begin_time = mindspore::lite::HiAppEventConfig::GetInstance()->GetTimeMs();
+   std::string devices;
+@@ -304,7 +310,6 @@ Status Model::Build(GraphCell graph, const std::shared_ptr<Context> &model_conte
+     devices += std::to_string(device->GetDeviceType()) + " ";
+   }
+ #endif
+-  std::stringstream err_msg;
+   if (impl_ == nullptr) {
+     MS_LOG(ERROR) << "Model implement is null.";
+     return kLiteNullptr;
+@@ -315,11 +320,6 @@ Status Model::Build(GraphCell graph, const std::shared_ptr<Context> &model_conte
+     MS_LOG(ERROR) << err_msg.str();
+     return Status(kLiteNullptr, err_msg.str());
+   }
+-  if (model_context == nullptr) {
+-    err_msg << "Invalid null context.";
context."; +- MS_LOG(ERROR) << err_msg.str(); +- return Status(kLiteNullptr, err_msg.str()); +- } + #if defined(ENABLE_PRE_INFERENCE) && defined(__linux__) && !defined(Debug) + if (lite::GetNumThreads() == lite::kSingleThread && impl_->IsEnablePreInference()) { + pid_t pid = fork(); +diff --git a/mindspore/lite/src/litert/pass/online_fusion/cast_gather_reduce_fusion_pass.cc b/mindspore/lite/src/litert/pass/online_fusion/cast_gather_reduce_fusion_pass.cc +index 4892292c..00186dc7 100644 +--- a/mindspore/lite/src/litert/pass/online_fusion/cast_gather_reduce_fusion_pass.cc ++++ b/mindspore/lite/src/litert/pass/online_fusion/cast_gather_reduce_fusion_pass.cc +@@ -127,6 +127,7 @@ int CastGatherReduceOnlineFusionPass::CreateCastGatherReduceCustomNode(LiteGraph + (void)memcpy(prim, fbb.GetBufferPointer(), fbb.GetSize()); + auto online_fusion_prim = flatbuffers::GetRoot(prim); + if (online_fusion_prim == nullptr) { ++ free(prim) + MS_LOG(ERROR) << "GetRoot CastGatherReduceFusion primitive failed."; + return RET_ERROR; + } +diff --git a/mindspore/lite/src/litert/pass/online_fusion/reduce_concat_fusion_pass.cc b/mindspore/lite/src/litert/pass/online_fusion/reduce_concat_fusion_pass.cc +index 5cfec79b..21115cc5 100644 +--- a/mindspore/lite/src/litert/pass/online_fusion/reduce_concat_fusion_pass.cc ++++ b/mindspore/lite/src/litert/pass/online_fusion/reduce_concat_fusion_pass.cc +@@ -135,6 +135,7 @@ int ReduceConcatOnlineFusionPass::CreateReduceConcatCustomNode(LiteGraph::Node * + (void)memcpy(prim, fbb.GetBufferPointer(), fbb.GetSize()); + auto online_fusion_prim = flatbuffers::GetRoot(prim); + if (online_fusion_prim == nullptr) { ++ free(prim) + MS_LOG(ERROR) << "GetRoot ReduceConcatFusion primitive failed."; + return RET_ERROR; + } +diff --git a/mindspore/lite/src/litert/pass/online_fusion/split_reduce_concat_fusion_pass.cc b/mindspore/lite/src/litert/pass/online_fusion/split_reduce_concat_fusion_pass.cc +index e6f1353f..f9a08a04 100644 +--- a/mindspore/lite/src/litert/pass/online_fusion/split_reduce_concat_fusion_pass.cc ++++ b/mindspore/lite/src/litert/pass/online_fusion/split_reduce_concat_fusion_pass.cc +@@ -233,6 +233,7 @@ int SplitReduceConcatOnlineFusionPass::CreateCustomNode(LiteGraph::Node *node, S + (void)memcpy(prim, fbb.GetBufferPointer(), fbb.GetSize()); + auto online_fusion_prim = flatbuffers::GetRoot(prim); + if (online_fusion_prim == nullptr) { ++ free(prim) + MS_LOG(ERROR) << "GetRoot SplitReduceConcatFusion primitive failed."; + return RET_ERROR; + } +diff --git a/mindspore/lite/src/litert/weight_decoder.cc b/mindspore/lite/src/litert/weight_decoder.cc +index d851ea1f..7872b83f 100644 +--- a/mindspore/lite/src/litert/weight_decoder.cc ++++ b/mindspore/lite/src/litert/weight_decoder.cc +@@ -177,6 +177,8 @@ int WeightDecoder::UnPackToInt(const SchemaTensorWrapper &src_tensor, lite::Tens + int WeightDecoder::UnPack(const SchemaTensorWrapper &src_tensor, lite::Tensor *dst_tensor) { + MS_ASSERT(src_tensor.handler() != nullptr); + MS_ASSERT(src_tensor.data() != nullptr); ++ MS_CHECK_TRUE_MSG(src_tensor.handler()->dims() != nullptr, RET_ERROR, "dims is nullptr"); ++ MS_CHECK_TRUE_MSG(src_tensor.handler()->name() != nullptr, RET_ERROR, "name is nullptr"); + STATUS ret = RET_OK; + if (src_tensor.handler()->enableHuffmanCode()) { + ret = WeightDecoder::DecodeHuffmanCode(src_tensor, dst_tensor); +-- +2.45.1.windows.1 +