From 43fd2d3dafc625947d249ea3e954c0b5611520bb Mon Sep 17 00:00:00 2001
From: zheng-kunfang <1845958389@qq.com>
Date: Wed, 13 Aug 2025 14:14:03 +0800
Subject: [PATCH] delete loadtime

Signed-off-by: zheng-kunfang <1845958389@qq.com>
---
 .../native/neural_network_runtime/nnexecutor.cpp | 13 +++++--------
 .../native/neural_network_runtime/nnexecutor.h   |  1 -
 2 files changed, 5 insertions(+), 9 deletions(-)

diff --git a/frameworks/native/neural_network_runtime/nnexecutor.cpp b/frameworks/native/neural_network_runtime/nnexecutor.cpp
index eaae366..65554fd 100644
--- a/frameworks/native/neural_network_runtime/nnexecutor.cpp
+++ b/frameworks/native/neural_network_runtime/nnexecutor.cpp
@@ -146,8 +146,7 @@ NNExecutor::NNExecutor(size_t backendID, std::shared_ptr device, std::sh
     m_extensionConfig(extensionConfig),
     m_enableFp16(enableFp16),
     m_performance(performance),
-    m_priority(priority),
-    m_loadtime(std::chrono::steady_clock::now()) {
+    m_priority(priority) {
     m_executorid = GenRandom();
     m_autoUnloadRunner = OHOS::AppExecFwk::EventRunner::Create
         ("nnexecutor_autounload" + std::to_string(m_executorid));
@@ -646,7 +645,6 @@ OH_NN_ReturnCode NNExecutor::RunSync(NN_Tensor* inputTensors[], size_t inputSize
     auto AutoUnloadTask = [this]() {
         DeinitModel("DelayUnload");
     };
-    m_loadtime = std::chrono::steady_clock::now();
     m_autoUnloadHandler->PostTask(AutoUnloadTask,
         "nnexecutor_autounload" + std::to_string(m_executorid), AUTOUNLOAD_TIME);
 
@@ -1659,14 +1657,13 @@ bool NNExecutor::DeinitModel(std::string mode)
             LOGW("DeinitScheduling failed, some error happen when DeinitScheduling model.");
         }
         m_preparedModel.reset();
-        std::chrono::duration duration = std::chrono::steady_clock::now() - m_loadtime;
         if (mode == "FrozenDeinit") {
             m_autoUnloadHandler->RemoveTask("nnexecutor_autounload" + std::to_string(m_executorid));
-            LOGI("FrozenDeinit pid=%{public}d originHiaiModelId=%{public}d hiaiModelId=%{public}d time=%{public}f",
-                getpid(), m_originHiaiModelId, modelId, duration.count());
+            LOGI("FrozenDeinit pid=%{public}d originHiaiModelId=%{public}d hiaiModelId=%{public}d",
+                getpid(), m_originHiaiModelId, modelId);
         } else {
-            LOGI("AutoUnload pid=%{public}d originHiaiModelId=%{public}d hiaiModelId=%{public}d time=%{public}f",
-                getpid(), m_originHiaiModelId, modelId, duration.count());
+            LOGI("AutoUnload pid=%{public}d originHiaiModelId=%{public}d hiaiModelId=%{public}d",
+                getpid(), m_originHiaiModelId, modelId);
         }
     }
 
diff --git a/frameworks/native/neural_network_runtime/nnexecutor.h b/frameworks/native/neural_network_runtime/nnexecutor.h
index a04e9b0..cc2b7dd 100644
--- a/frameworks/native/neural_network_runtime/nnexecutor.h
+++ b/frameworks/native/neural_network_runtime/nnexecutor.h
@@ -145,7 +145,6 @@ private:
     std::shared_ptr m_autoUnloadHandler;
     uint64_t m_executorid;
     std::mutex m_mutex;
-    std::chrono::time_point m_loadtime;
 };
 } // namespace NeuralNetworkRuntime
 } // namespace OHOS
--
Gitee