From 7f913fc409348c637516327cce831e5f2c684e35 Mon Sep 17 00:00:00 2001
From: w30052974
Date: Mon, 22 Jul 2024 20:08:12 +0800
Subject: [PATCH 1/2] Modify dllite service modelId
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: w30052974
---
 frameworks/native/neural_network_core/compiler.h       |  4 ++++
 .../neural_network_core/neural_network_core.cpp        | 11 ++++-------
 frameworks/native/neural_network_core/nnrt_client.h    |  1 +
 .../native/neural_network_runtime/nncompiler.cpp       |  6 ++++++
 frameworks/native/neural_network_runtime/nncompiler.h  |  1 +
 5 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/frameworks/native/neural_network_core/compiler.h b/frameworks/native/neural_network_core/compiler.h
index 3b946ff..bb221a9 100644
--- a/frameworks/native/neural_network_core/compiler.h
+++ b/frameworks/native/neural_network_core/compiler.h
@@ -46,6 +46,10 @@ public:
     virtual OH_NN_ReturnCode SetExtensionConfig(const std::unordered_map>& configs) = 0;
     virtual OH_NN_ReturnCode SetOptions(const std::vector>& options) = 0;
+    virtual OH_NN_ReturnCode GetModelName(std::string& modelName)
+    {
+        return OH_NN_UNSUPPORTED;
+    }
 };
 } // namespace NeuralNetworkRuntime
 } // namespace OHOS

diff --git a/frameworks/native/neural_network_core/neural_network_core.cpp b/frameworks/native/neural_network_core/neural_network_core.cpp
index 2291a1a..d78f5c8 100644
--- a/frameworks/native/neural_network_core/neural_network_core.cpp
+++ b/frameworks/native/neural_network_core/neural_network_core.cpp
@@ -652,13 +652,15 @@ OH_NN_ReturnCode GetModelId(Compilation** compilation)
         return OH_NN_INVALID_PARAMETER;
     }
 
-    int ret = static_cast<int>(OH_NN_SUCCESS);
     if (compilationImpl->nnModel != nullptr) {
         compilationImpl->nnrtModelID = nnrtService.GetNNRtModelIDFromModel(compilationImpl->nnModel);
     } else if (compilationImpl->offlineModelPath != nullptr) {
         compilationImpl->nnrtModelID = nnrtService.GetNNRtModelIDFromPath(compilationImpl->offlineModelPath);
     } else if (compilationImpl->cachePath != nullptr) {
-        compilationImpl->nnrtModelID = nnrtService.GetNNRtModelIDFromPath(compilationImpl->cachePath);
+        std::string modelName;
+        compilationImpl->compiler->GetModelName(modelName);
+        compilationImpl->nnrtModelID =
+            nnrtService.GetNNRtModelIDFromCache(compilationImpl->cachePath, modelName.c_str());
     } else if ((compilationImpl->offlineModelBuffer.first != nullptr) && \
         (compilationImpl->offlineModelBuffer.second != size_t(0))) {
         compilationImpl->nnrtModelID = nnrtService.GetNNRtModelIDFromBuffer(
@@ -672,11 +674,6 @@ OH_NN_ReturnCode GetModelId(Compilation** compilation)
         return OH_NN_INVALID_PARAMETER;
     }
 
-    if (ret != static_cast<int>(OH_NN_SUCCESS)) {
-        LOGE("GetModelId failed, some error happened when set modelId.");
-        return OH_NN_INVALID_PARAMETER;
-    }
-
     return OH_NN_SUCCESS;
 }
 
diff --git a/frameworks/native/neural_network_core/nnrt_client.h b/frameworks/native/neural_network_core/nnrt_client.h
index e26018d..d407366 100644
--- a/frameworks/native/neural_network_core/nnrt_client.h
+++ b/frameworks/native/neural_network_core/nnrt_client.h
@@ -30,6 +30,7 @@ public:
     int (*CheckModelSizeFromBuffer)(const void* buffer, size_t size, bool& exceedLimit) = nullptr;
     int (*CheckModelSizeFromModel)(void* model, bool& exceedLimit) = nullptr;
     size_t (*GetNNRtModelIDFromPath)(const char*) = nullptr;
+    size_t (*GetNNRtModelIDFromCache)(const char* path, const char* modelName) = nullptr;
     size_t (*GetNNRtModelIDFromBuffer)(const void* buffer, size_t size) = nullptr;
     size_t (*GetNNRtModelIDFromModel)(void* model) = nullptr;
     int (*SetModelID)(int callingPid, uint32_t hiaimodelID, size_t nnrtModelID) = nullptr;

diff --git a/frameworks/native/neural_network_runtime/nncompiler.cpp b/frameworks/native/neural_network_runtime/nncompiler.cpp
index 5562ec6..f013b2a 100644
--- a/frameworks/native/neural_network_runtime/nncompiler.cpp
+++ b/frameworks/native/neural_network_runtime/nncompiler.cpp
@@ -728,6 +728,12 @@ OH_NN_ReturnCode NNCompiler::SetOptions(const std::vector>
     return OH_NN_UNSUPPORTED;
 }
 
+OH_NN_ReturnCode NNCompiler::GetModelName(std::string& modelName)
+{
+    modelName = m_extensionConfig.modelName;
+    return OH_NN_SUCCESS;
+}
+
 NNExecutor* NNCompiler::CreateExecutor()
 {
     if (m_device == nullptr) {

diff --git a/frameworks/native/neural_network_runtime/nncompiler.h b/frameworks/native/neural_network_runtime/nncompiler.h
index a1b82ad..e280359 100644
--- a/frameworks/native/neural_network_runtime/nncompiler.h
+++ b/frameworks/native/neural_network_runtime/nncompiler.h
@@ -51,6 +51,7 @@ public:
     OH_NN_ReturnCode SetExtensionConfig(const std::unordered_map>& configs) override;
     OH_NN_ReturnCode SetOptions(const std::vector>& options) override;
+    OH_NN_ReturnCode GetModelName(std::string& modelName) override;
 
     NNExecutor* CreateExecutor();
--
Gitee

From e72c6ba51bd64318bcf2c0e66f4fb2cd97f617b5 Mon Sep 17 00:00:00 2001
From: w30052974
Date: Mon, 22 Jul 2024 20:12:43 +0800
Subject: [PATCH 2/2] Modify dllite service modelId
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: w30052974
---
 frameworks/native/neural_network_core/nnrt_client.cpp | 1 +
 1 file changed, 1 insertion(+)

diff --git a/frameworks/native/neural_network_core/nnrt_client.cpp b/frameworks/native/neural_network_core/nnrt_client.cpp
index f6aed28..3bd1774 100644
--- a/frameworks/native/neural_network_core/nnrt_client.cpp
+++ b/frameworks/native/neural_network_core/nnrt_client.cpp
@@ -59,6 +59,7 @@ NNRtServiceApi& NNRtServiceApi::GetInstance()
     LoadFunction(libNNRtService, "CheckModelSizeFromBuffer", &nnrtService.CheckModelSizeFromBuffer);
     LoadFunction(libNNRtService, "CheckModelSizeFromModel", &nnrtService.CheckModelSizeFromModel);
     LoadFunction(libNNRtService, "GetNNRtModelIDFromPath", &nnrtService.GetNNRtModelIDFromPath);
+    LoadFunction(libNNRtService, "GetNNRtModelIDFromCache", &nnrtService.GetNNRtModelIDFromCache);
     LoadFunction(libNNRtService, "GetNNRtModelIDFromBuffer", &nnrtService.GetNNRtModelIDFromBuffer);
     LoadFunction(libNNRtService, "GetNNRtModelIDFromModel", &nnrtService.GetNNRtModelIDFromModel);
     LoadFunction(libNNRtService, "SetModelID", &nnrtService.SetModelID);
--
Gitee
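
Note on how the two patches fit together: patch 1 gives Compiler a GetModelName() hook (defaulting to OH_NN_UNSUPPORTED), overrides it in NNCompiler, and makes the cache branch of GetModelId() pass both the cache path and the model name to the service through the new GetNNRtModelIDFromCache entry; patch 2 resolves that entry in the service client. The standalone C++ sketch below only illustrates that flow with simplified stand-in types: the enum values, class members, fake cache-ID function and literal path/model name are assumptions for the example; only the GetModelName() and GetNNRtModelIDFromCache signatures come from the diffs.

// Minimal sketch, not the framework's real code.
#include <cstddef>
#include <functional>
#include <iostream>
#include <string>

enum OH_NN_ReturnCode { OH_NN_SUCCESS, OH_NN_UNSUPPORTED };  // values illustrative

// Base compiler: GetModelName() defaults to "unsupported", as in compiler.h,
// so existing Compiler implementations need no changes.
class Compiler {
public:
    virtual ~Compiler() = default;
    virtual OH_NN_ReturnCode GetModelName(std::string& modelName)
    {
        return OH_NN_UNSUPPORTED;
    }
};

// NNCompiler override: hands back the model name it has recorded
// (m_extensionConfig.modelName in the real nncompiler.cpp).
class NNCompiler : public Compiler {
public:
    explicit NNCompiler(std::string name) : m_modelName(std::move(name)) {}
    OH_NN_ReturnCode GetModelName(std::string& modelName) override
    {
        modelName = m_modelName;
        return OH_NN_SUCCESS;
    }
private:
    std::string m_modelName;  // stand-in for m_extensionConfig.modelName
};

// Stand-in for the service function table: a cache-based model ID now takes
// both the cache directory and the model name.
struct NNRtServiceApi {
    size_t (*GetNNRtModelIDFromCache)(const char* path, const char* modelName) = nullptr;
};

// Hypothetical placeholder for the service-side implementation.
size_t FakeGetNNRtModelIDFromCache(const char* path, const char* modelName)
{
    return std::hash<std::string>{}(std::string(path) + "/" + modelName);
}

int main()
{
    NNRtServiceApi nnrtService;
    nnrtService.GetNNRtModelIDFromCache = FakeGetNNRtModelIDFromCache;

    NNCompiler compiler("image_classifier");
    std::string modelName;
    if (compiler.GetModelName(modelName) != OH_NN_SUCCESS) {
        modelName = "";  // older compilers report OH_NN_UNSUPPORTED; fall back to an empty name
    }

    // Mirrors the cachePath branch of GetModelId() in neural_network_core.cpp.
    size_t nnrtModelID = nnrtService.GetNNRtModelIDFromCache("/data/nn_cache", modelName.c_str());
    std::cout << "nnrtModelID = " << nnrtModelID << std::endl;
    return 0;
}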
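
Patch 2 wires the new function pointer through the existing LoadFunction() helper, whose implementation is outside this diff. The sketch below shows the usual dlopen/dlsym pattern such a helper follows; the template signature of LoadFunction and the library name libnnrt_service.so are assumptions here, only the exported symbol name and the pointer type in NNRtServiceApi come from the patch.

// Sketch of a dlsym-based loader, assuming LoadFunction is a thin dlsym wrapper.
#include <dlfcn.h>
#include <cstddef>
#include <cstdio>

struct NNRtServiceApi {
    size_t (*GetNNRtModelIDFromCache)(const char* path, const char* modelName) = nullptr;
};

// Resolve one exported symbol into the matching function pointer; leave the
// pointer null (and log) if the loaded library does not export the symbol.
template <typename T>
void LoadFunction(void* handle, const char* name, T* fptr)
{
    void* sym = dlsym(handle, name);
    if (sym == nullptr) {
        std::fprintf(stderr, "symbol %s not found: %s\n", name, dlerror());
        return;
    }
    *fptr = reinterpret_cast<T>(sym);
}

int main()
{
    // Hypothetical library name; the patch only shows the libNNRtService handle.
    void* libNNRtService = dlopen("libnnrt_service.so", RTLD_NOW | RTLD_LOCAL);
    if (libNNRtService == nullptr) {
        std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return 1;
    }

    NNRtServiceApi nnrtService;
    LoadFunction(libNNRtService, "GetNNRtModelIDFromCache", &nnrtService.GetNNRtModelIDFromCache);

    // Callers must check the pointer before use: an older service build may
    // not export the new symbol yet.
    if (nnrtService.GetNNRtModelIDFromCache != nullptr) {
        size_t id = nnrtService.GetNNRtModelIDFromCache("/data/nn_cache", "image_classifier");
        std::printf("nnrtModelID = %zu\n", id);
    }

    dlclose(libNNRtService);
    return 0;
}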