From 76535ddd03399dddef681220cb7c2b46dcda1bd7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E5=8F=B6=E5=86=9B?= Date: Fri, 5 Jul 2024 14:04:54 +0800 Subject: [PATCH 1/6] =?UTF-8?q?UT=E7=94=A8=E4=BE=8B=E6=96=B0=E5=A2=9E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: 王叶军 --- .../common/v1_0/compilation_mock_idevice.cpp | 6 +- .../common/v1_0/executor_mock_device.cpp | 3 +- .../common/v2_0/compilation_mock_idevice.cpp | 4 +- test/unittest/components/BUILD.gn | 23 + .../device_registrar_test.cpp | 10 +- .../v1_0/inner_model/nn_tensor_desc_test.cpp | 220 +++ .../v1_0/inner_model/nn_validation_test.cpp | 36 + .../neural_network_core_test.cpp | 1328 +++++++++++++++++ .../neural_network_core_test.h | 6 + .../device_registrar_test.cpp | 10 +- 10 files changed, 1630 insertions(+), 16 deletions(-) create mode 100644 test/unittest/components/v1_0/inner_model/nn_tensor_desc_test.cpp diff --git a/test/unittest/common/v1_0/compilation_mock_idevice.cpp b/test/unittest/common/v1_0/compilation_mock_idevice.cpp index adc9ed5..2861376 100644 --- a/test/unittest/common/v1_0/compilation_mock_idevice.cpp +++ b/test/unittest/common/v1_0/compilation_mock_idevice.cpp @@ -177,13 +177,13 @@ OH_NN_ReturnCode HDIPreparedModelV1_0::ExportModelCache(std::vector& mod int bufferSize = 13; Buffer buffer; std::string aBuffer = "mock_buffer_a"; - buffer.data = (void*)aBuffer.c_str(); + buffer.data = const_cast(static_cast(aBuffer.c_str())); buffer.length = bufferSize; modelCache.emplace_back(buffer); Buffer buffer2; std::string bBuffer = "mock_buffer_b"; - buffer2.data = (void*)bBuffer.c_str(); + buffer2.data = const_cast(static_cast(bBuffer.c_str())); buffer2.length = bufferSize; modelCache.emplace_back(buffer2); @@ -202,7 +202,7 @@ void* HDIDeviceV1_0::AllocateBuffer(size_t length) return nullptr; } - void* buffer = (void*)malloc(length); + void* buffer = malloc(length); if (buffer == nullptr) { LOGE("HDIDeviceV1_0 mock AllocateBuffer 
failed, the buffer is nullptr"); return nullptr; diff --git a/test/unittest/common/v1_0/executor_mock_device.cpp b/test/unittest/common/v1_0/executor_mock_device.cpp index 1ae76a0..a3e9c59 100644 --- a/test/unittest/common/v1_0/executor_mock_device.cpp +++ b/test/unittest/common/v1_0/executor_mock_device.cpp @@ -29,7 +29,7 @@ void* HDIDeviceV1_0::AllocateBuffer(size_t length) return nullptr; } - void* buffer = (void*)malloc(length); + void* buffer = malloc(length); if (buffer == nullptr) { LOGE("alloct buffer failed"); return nullptr; @@ -37,6 +37,7 @@ void* HDIDeviceV1_0::AllocateBuffer(size_t length) if (OHOS::HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_INVALID_PARAMETER) { OHOS::HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN; + free(buffer); return nullptr; } return buffer; diff --git a/test/unittest/common/v2_0/compilation_mock_idevice.cpp b/test/unittest/common/v2_0/compilation_mock_idevice.cpp index 8e0b3ec..8e70e0e 100644 --- a/test/unittest/common/v2_0/compilation_mock_idevice.cpp +++ b/test/unittest/common/v2_0/compilation_mock_idevice.cpp @@ -177,13 +177,13 @@ OH_NN_ReturnCode HDIPreparedModelV2_0::ExportModelCache(std::vector& mod int bufferSize = 13; Buffer buffer; std::string aBuffer = "mock_buffer_a"; - buffer.data = (void*)aBuffer.c_str(); + buffer.data = const_cast(static_cast(aBuffer.c_str())); buffer.length = bufferSize; modelCache.emplace_back(buffer); Buffer buffer2; std::string bBuffer = "mock_buffer_b"; - buffer2.data = (void*)bBuffer.c_str(); + buffer2.data = const_cast(static_cast(bBuffer.c_str())); buffer2.length = bufferSize; modelCache.emplace_back(buffer2); diff --git a/test/unittest/components/BUILD.gn b/test/unittest/components/BUILD.gn index 777368d..375f56e 100644 --- a/test/unittest/components/BUILD.gn +++ b/test/unittest/components/BUILD.gn @@ -213,6 +213,28 @@ ohos_unittest("NeuralNetworkCoreV1_0Test") { ] } +ohos_unittest("NnTensorDescV1_0Test") { + module_out_path = 
module_output_path + + sources = [ "./v1_0/inner_model/nn_tensor_desc_test.cpp" ] + configs = [ ":module_private_config" ] + + deps = [ + "../../../frameworks/native/neural_network_core:libneural_network_core", + "../../../frameworks/native/neural_network_runtime:libneural_network_runtime", + "//third_party/googletest:gmock_main", + "//third_party/googletest:gtest_main", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_nnrt:libnnrt_proxy_1.0", + "hdf_core:libhdf_utils", + "hilog:libhilog", + "hitrace:libhitracechain", + ] +} + ohos_unittest("QuantParamsTest") { module_out_path = module_output_path @@ -755,6 +777,7 @@ group("components_unittest") { ":NNCompiledCacheTest", ":NNCompilerTest", ":NeuralNetworkCoreV1_0Test", + ":NnTensorDescV1_0Test", ":NeuralNetworkRuntimeV1_0Test", ":NeuralNetworkRuntimeV2_0Test", ":NnTensorV1_0Test", diff --git a/test/unittest/components/v1_0/device_registrar/device_registrar_test.cpp b/test/unittest/components/v1_0/device_registrar/device_registrar_test.cpp index df57a11..95d85bb 100644 --- a/test/unittest/components/v1_0/device_registrar/device_registrar_test.cpp +++ b/test/unittest/components/v1_0/device_registrar/device_registrar_test.cpp @@ -192,22 +192,22 @@ sptr IRegisterDevice::Get(const std::string& serviceName, bool return nullptr; } - sptr mockIDevice = sptr(new (std::nothrow) MockIDeviceImp()); - if (mockIDevice.GetRefPtr() == nullptr) { + auto mockIDevice = std::make_shared(); + if (!mockIDevice) { LOGE("Failed to new MockIDeviceImp object."); return nullptr; } std::string deviceName = "MockIDeviceA"; - EXPECT_CALL(*((MockIDeviceImp *)mockIDevice.GetRefPtr()), GetDeviceName(::testing::_)) + EXPECT_CALL(*mockIDevice, GetDeviceName(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(HDF_SUCCESS))); std::string vendorName = "MockVendorA"; - EXPECT_CALL(*((MockIDeviceImp *)mockIDevice.GetRefPtr()), GetVendorName(::testing::_)) + EXPECT_CALL(*mockIDevice, 
GetVendorName(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(vendorName), ::testing::Return(HDF_SUCCESS))); V1_0::DeviceStatus deviceStatus = V1_0::DeviceStatus::AVAILABLE; - EXPECT_CALL(*((MockIDeviceImp *)mockIDevice.GetRefPtr()), GetDeviceStatus(::testing::_)) + EXPECT_CALL(*mockIDevice, GetDeviceStatus(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceStatus), ::testing::Return(HDF_SUCCESS))); return mockIDevice; } diff --git a/test/unittest/components/v1_0/inner_model/nn_tensor_desc_test.cpp b/test/unittest/components/v1_0/inner_model/nn_tensor_desc_test.cpp new file mode 100644 index 0000000..e2c2c12 --- /dev/null +++ b/test/unittest/components/v1_0/inner_model/nn_tensor_desc_test.cpp @@ -0,0 +1,220 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "validation.h" +#include "tensor_desc.h" + +using namespace testing; +using namespace testing::ext; +using namespace OHOS::NeuralNetworkRuntime; +using namespace OHOS::NeuralNetworkRuntime::Validation; + +namespace NNRT { +namespace UnitTest { +class NnTensorDescTest : public testing::Test { +}; + +/** + * @tc.name: nn_get_datatype_001 + * @tc.desc: Verify the success of the GetDataType function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_datatype_001, TestSize.Level1) +{ + TensorDesc tensordesc; + OH_NN_DataType* testdatatype = nullptr; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.GetDataType(testdatatype)); +} + +/** + * @tc.name: nn_get_datatype_002 + * @tc.desc: Verify the success of the GetDataType function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_datatype_002, TestSize.Level1) +{ + TensorDesc tensordesc; + OH_NN_DataType testdatatype = OH_NN_BOOL; + EXPECT_EQ(OH_NN_SUCCESS, tensordesc.GetDataType(&testdatatype)); +} + +/** + * @tc.name: nn_set_datatype_001 + * @tc.desc: Verify the success of the SetDataType function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_set_datatype_001, TestSize.Level1) +{ + TensorDesc tensordesc; + int dataTypeTest = 13; + OH_NN_DataType testdataType = (OH_NN_DataType)dataTypeTest; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.SetDataType(testdataType)); +} + +/** + * @tc.name: nn_get_format_001 + * @tc.desc: Verify the success of the GetFormat function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_format_001, TestSize.Level1) +{ + TensorDesc tensordesc; + OH_NN_Format* testformat = nullptr; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.GetFormat(testformat)); +} + +/** + * @tc.name: nn_set_format_001 + * @tc.desc: Verify the success of the SetFormat function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_set_format_001, TestSize.Level1) +{ + TensorDesc tensordesc; + OH_NN_Format testformat = OH_NN_FORMAT_NCHW; + 
EXPECT_EQ(OH_NN_SUCCESS, tensordesc.SetFormat(testformat)); +} + +/** + * @tc.name: nn_get_shape_001 + * @tc.desc: Verify the success of the GetShape function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_shape_001, TestSize.Level1) +{ + TensorDesc tensordesc; + int32_t** testshape = nullptr; + size_t* testshapeNum = nullptr; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.GetShape(testshape, testshapeNum)); +} + +/** + * @tc.name: nn_get_shape_002 + * @tc.desc: Verify the success of the GetShape function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_shape_002, TestSize.Level1) +{ + TensorDesc tensordesc; + int32_t shapDim[2] = {3, 3}; + int32_t* ptr = shapDim; + int32_t** testshape = &ptr; + size_t* testshapeNum = nullptr; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.GetShape(testshape, testshapeNum)); +} + +/** + * @tc.name: nn_get_shape_003 + * @tc.desc: Verify the success of the GetShape function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_shape_003, TestSize.Level1) +{ + TensorDesc tensordesc; + int32_t** testshape = new int32_t*[1]; + testshape[0] = nullptr; + size_t* testshapenum = nullptr; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.GetShape(testshape, testshapenum)); +} + +/** + * @tc.name: nn_set_shape_001 + * @tc.desc: Verify the success of the SetShape function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_set_shape_001, TestSize.Level1) +{ + TensorDesc tensordesc; + const int32_t* testshape = nullptr; + size_t testshapenum = 0; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.SetShape(testshape, testshapenum)); +} + +/** + * @tc.name: nn_set_shape_002 + * @tc.desc: Verify the success of the SetShape function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_set_shape_002, TestSize.Level1) +{ + TensorDesc tensordesc; + const int32_t testShape[] = {2, 3, 5}; + size_t shapenum = 0; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.SetShape(testShape, shapenum)); +} + +/** + * @tc.name: 
nn_get_elementnum_001 + * @tc.desc: Verify the success of the GetElementNum function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_elementnum_001, TestSize.Level1) +{ + TensorDesc tensordesc; + size_t* testelementNum = nullptr; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.GetElementNum(testelementNum)); +} + +/** + * @tc.name: nn_get_bytesize_001 + * @tc.desc: Verify the success of the GetByteSize function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_bytesize_001, TestSize.Level1) +{ + TensorDesc tensordesc; + size_t* testbytesize = nullptr; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.GetByteSize(testbytesize)); +} + +/** + * @tc.name: nn_get_bytesize_002 + * @tc.desc: Verify the success of the GetByteSize function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_bytesize_002, TestSize.Level1) +{ + TensorDesc tensordesc; + size_t* testbytesize = 0; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.GetByteSize(testbytesize)); +} + +/** + * @tc.name: nn_set_name_001 + * @tc.desc: Verify the success of the SetName function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_set_name_001, TestSize.Level1) +{ + TensorDesc tensordesc; + const char* testsetname = nullptr; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.SetName(testsetname)); +} + +/** + * @tc.name: nn_get_name_001 + * @tc.desc: Verify the success of the GetName function + * @tc.type: FUNC + */ +HWTEST_F(NnTensorDescTest, nn_get_name_001, TestSize.Level1) +{ + TensorDesc tensordesc; + const char** testgetname = nullptr; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.GetName(testgetname)); +} +} // namespace UnitTest +} // namespace NNRT diff --git a/test/unittest/components/v1_0/inner_model/nn_validation_test.cpp b/test/unittest/components/v1_0/inner_model/nn_validation_test.cpp index 1fdff2f..5adce77 100644 --- a/test/unittest/components/v1_0/inner_model/nn_validation_test.cpp +++ b/test/unittest/components/v1_0/inner_model/nn_validation_test.cpp @@ -64,6 
+64,18 @@ HWTEST_F(NnValidationTest, nn_validation_validate_tensor_datatype_003, TestSize. EXPECT_EQ(false, ValidateTensorDataType(dataType)); } +/** + * @tc.name: nn_validation_validate_tensor_Format_001 + * @tc.desc: Verify the success of the validate_tensor_format function + * @tc.type: FUNC + */ +HWTEST_F(NnValidationTest, nn_validation_validate_tensor_Format_001, TestSize.Level1) +{ + int forMatTest = 2; + OH_NN_Format forMat = (OH_NN_Format)forMatTest; + EXPECT_EQ(true, ValidateTensorFormat(forMat)); +} + /** * @tc.name: nn_validation_validate_preformance_mode_001 * @tc.desc: Verify the success of the validate_preformance_mode function @@ -171,5 +183,29 @@ HWTEST_F(NnValidationTest, nn_validation_fusetype_003, TestSize.Level1) OH_NN_FuseType fuseType = (OH_NN_FuseType)fuseTypeTest; EXPECT_EQ(false, ValidateFuseType(fuseType)); } + +/** + * @tc.name: nn_validation_tensortype_001 + * @tc.desc: Verify the success of the validate_tensortype function + * @tc.type: FUNC + */ +HWTEST_F(NnValidationTest, nn_validation_tensortype_001, TestSize.Level1) +{ + int tensorTyepTest = 1; + OH_NN_TensorType tensorTyep = (OH_NN_TensorType)tensorTyepTest; + EXPECT_EQ(true, ValidateTensorType(tensorTyep)); +} + +/** + * @tc.name: nn_validation_tensortype_002 + * @tc.desc: Verify the success of the validate_tensortype function + * @tc.type: FUNC + */ +HWTEST_F(NnValidationTest, nn_validation_tensortype_002, TestSize.Level1) +{ + int tensorTyepTest = 163; + OH_NN_TensorType tensorTyep = (OH_NN_TensorType)tensorTyepTest; + EXPECT_EQ(false, ValidateTensorType(tensorTyep)); +} } // namespace UnitTest } // namespace NNRT diff --git a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp index c72af22..aa26cde 100644 --- a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp +++ 
b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp @@ -16,9 +16,13 @@ #include #include +#include "nnbackend.h" #include "common/utils.h" #include "neural_network_core_test.h" #include "compilation.h" +#include "tensor.h" +#include "device.h" +#include "common/log.h" #include "interfaces/kits/c/neural_network_runtime/neural_network_core.h" namespace OHOS { @@ -93,6 +97,41 @@ OH_NN_ReturnCode NeuralNetworkCoreTest::BuildModel(InnerModel& model) return ret; } +class MockIDevice : public Device { +public: + MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&)); + MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&)); + MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&)); + MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&)); + MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&)); + MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr, + std::vector&)); + MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector&, + const ModelConfig&, + std::shared_ptr&, + bool&)); + MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD1(AllocateBuffer, void*(size_t)); + MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr)); + MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr)); + MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*)); + 
MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&)); + MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t)); +}; + /* * @tc.name: alldevicesid_001 * @tc.desc: Verify the allDeviceIds is nullptr of the OH_NNDevice_GetAllDevicesID function. @@ -210,6 +249,34 @@ HWTEST_F(NeuralNetworkCoreTest, device_get_type_002, testing::ext::TestSize.Leve EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } +/* + * @tc.name: device_get_type_003 + * @tc.desc: Verify the error happened when getting name of deviceID of the OH_NNDevice_GetType function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, device_get_type_003, testing::ext::TestSize.Level0) +{ + size_t deviceID = 1; + OH_NN_DeviceType deviceType = OH_NN_OTHERS; + OH_NN_DeviceType* pDeviceType = &deviceType; + OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: device_get_type_004 + * @tc.desc: Verify the success of the OH_NNDevice_GetType function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, device_get_type_004, testing::ext::TestSize.Level0) +{ + size_t deviceID = 1; + OH_NN_DeviceType deviceType = OH_NN_CPU; + OH_NN_DeviceType* pDeviceType = &deviceType; + OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + /* * @tc.name: compilation_construct_001 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNCompilation_Construct function. 
@@ -282,6 +349,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_constructforcache_001, testing::ext: OH_NNCompilation* ret = OH_NNCompilation_ConstructForCache(); Compilation *compilation = new (std::nothrow) Compilation(); OH_NNCompilation* nnCompilation = reinterpret_cast(compilation); + delete compilation; EXPECT_NE(nnCompilation, ret); } @@ -313,6 +381,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_002, testing::e size_t length = 0; size_t* modelSize = nullptr; OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, length, modelSize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -329,6 +398,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_003, testing::e size_t length = 0; size_t* modelSize = nullptr; OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, length, modelSize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -345,6 +415,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_004, testing::e size_t length = 0; size_t* modelSize = nullptr; OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, length, modelSize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -360,6 +431,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_005, testing::e char buffer[SIZE_ONE]; size_t* modelSize = nullptr; OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, SIZE_ONE, modelSize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -375,6 +447,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_006, testing::e char buffer[SIZE_ONE]; size_t modelSize = 0; OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, SIZE_ONE, &modelSize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -404,6 +477,7 @@ HWTEST_F(NeuralNetworkCoreTest, 
compilation_importcachefrombuffer_002, testing:: const void* buffer = nullptr; size_t modelsize = 0; OH_NN_ReturnCode ret = OH_NNCompilation_ImportCacheFromBuffer(nnCompilation, buffer, modelsize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -419,6 +493,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_importcachefrombuffer_003, testing:: char buffer[SIZE_ONE]; size_t modelsize = 0; OH_NN_ReturnCode ret = OH_NNCompilation_ImportCacheFromBuffer(nnCompilation, buffer, modelsize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -433,6 +508,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_importcachefrombuffer_004, testing:: OH_NNCompilation* nnCompilation = reinterpret_cast(compilation); char buffer[SIZE_ONE]; OH_NN_ReturnCode ret = OH_NNCompilation_ImportCacheFromBuffer(nnCompilation, buffer, SIZE_ONE); + delete compilation; EXPECT_EQ(OH_NN_SUCCESS, ret); } @@ -464,6 +540,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_addextensionconfig_002, testing::ext const void* cofigvalue = nullptr; const size_t configvaluesize = 0; OH_NN_ReturnCode ret = OH_NNCompilation_AddExtensionConfig(nnCompilation, configname, cofigvalue, configvaluesize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -480,6 +557,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_addextensionconfig_003, testing::ext const void* cofigvalue = nullptr; const size_t configvaluesize = 0; OH_NN_ReturnCode ret = OH_NNCompilation_AddExtensionConfig(nnCompilation, configname, cofigvalue, configvaluesize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -496,6 +574,7 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_addextensionconfig_004, testing::ext char cofigvalue[SIZE_ONE]; const size_t configvaluesize = 0; OH_NN_ReturnCode ret = OH_NNCompilation_AddExtensionConfig(nnCompilation, configname, cofigvalue, configvaluesize); + delete compilation; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -511,6 +590,7 @@ 
HWTEST_F(NeuralNetworkCoreTest, compilation_addextensionconfig_005, testing::ext const char* configname = "ConfigName"; char cofigvalue[SIZE_ONE]; OH_NN_ReturnCode ret = OH_NNCompilation_AddExtensionConfig(nnCompilation, configname, cofigvalue, SIZE_ONE); + delete compilation; EXPECT_EQ(OH_NN_SUCCESS, ret); } @@ -527,6 +607,21 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_set_device_001, testing::ext::TestSi EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } +/* + * @tc.name: compilation_set_device_002 + * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_set_device_002, testing::ext::TestSize.Level0) +{ + Compilation *compilation = new (std::nothrow) Compilation(); + OH_NNCompilation* nnCompilation = reinterpret_cast(compilation); + size_t deviceId = 1; + OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(nnCompilation, deviceId); + delete compilation; + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + /* * @tc.name: compilation_set_cache_001 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function. @@ -541,6 +636,148 @@ HWTEST_F(NeuralNetworkCoreTest, compilation_set_cache_001, testing::ext::TestSiz EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } +/* + * @tc.name: compilation_set_cache_002 + * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_set_cache_002, testing::ext::TestSize.Level0) +{ + Compilation *compilation = new (std::nothrow) Compilation(); + OH_NNCompilation* nnCompilation = reinterpret_cast(compilation); + const char* cacheDir = nullptr; + uint32_t version = 1; + OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version); + delete compilation; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: compilation_set_cache_003 + * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_set_cache_003, testing::ext::TestSize.Level0) +{ + Compilation *compilation = new (std::nothrow) Compilation(); + OH_NNCompilation* nnCompilation = reinterpret_cast(compilation); + const char* cacheDir = "../"; + uint32_t version = 1; + OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version); + delete compilation; + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + +/* + * @tc.name: compilation_set_performancemode_001 + * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_set_performancemode_001, testing::ext::TestSize.Level0) +{ + OH_NNCompilation* nnCompilation = nullptr; + OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE; + OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: compilation_set_performancemode_002 + * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_set_performancemode_002, testing::ext::TestSize.Level0) +{ + Compilation *compilation = new (std::nothrow) Compilation(); + OH_NNCompilation* nnCompilation = reinterpret_cast(compilation); + OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE; + OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode); + delete compilation; + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + +/* + * @tc.name: compilation_set_priority_001 + * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_set_priority_001, testing::ext::TestSize.Level0) +{ + OH_NNCompilation* nnCompilation = nullptr; + OH_NN_Priority priority = OH_NN_PRIORITY_NONE; + OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: compilation_set_priority_002 + * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_set_priority_002, testing::ext::TestSize.Level0) +{ + Compilation *compilation = new (std::nothrow) Compilation(); + OH_NNCompilation* nnCompilation = reinterpret_cast(compilation); + OH_NN_Priority priority = OH_NN_PRIORITY_NONE; + OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority); + delete compilation; + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + +/* + * @tc.name: compilation_enablefloat16_001 + * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_SetCache function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_enablefloat16_001, testing::ext::TestSize.Level0) +{ + OH_NNCompilation* nnCompilation = nullptr; + bool enableFloat16 = true; + OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: compilation_enablefloat16_002 + * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_SetCache function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_enablefloat16_002, testing::ext::TestSize.Level0) +{ + Compilation *compilation = new (std::nothrow) Compilation(); + OH_NNCompilation* nnCompilation = reinterpret_cast(compilation); + bool enableFloat16 = true; + OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16); + delete compilation; + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + +/* + * @tc.name: compilation_build_001 + * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_SetCache function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_build_001, testing::ext::TestSize.Level0) +{ + OH_NNCompilation *nncompilation = nullptr; + OH_NN_ReturnCode ret = OH_NNCompilation_Build(nncompilation); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: compilation_build_002 + * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_SetCache function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, compilation_build_002, testing::ext::TestSize.Level0) +{ + Compilation *compilation = new (std::nothrow) Compilation(); + OH_NNCompilation* nnCompilation = reinterpret_cast(compilation); + OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation); + delete compilation; + EXPECT_EQ(OH_NN_FAILED, ret); +} + /* * @tc.name: nnt_tensordesc_destroy_001 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_Destroy function. 
@@ -763,6 +1000,1097 @@ HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setshape_004, testing::ext::TestS OH_NN_ReturnCode ret = OH_NNTensorDesc_SetShape(tensorDesc, inputDims, shapeLength); EXPECT_EQ(OH_NN_SUCCESS, ret); } + +/* + * @tc.name: nnt_tensordesc_Getshape_001 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_Getshape_001, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = nullptr; + int32_t* shape = nullptr; + size_t* shapeLength = 0; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetShape(tensorDesc, &shape, shapeLength); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_Getshape_002 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_Getshape_002, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + int32_t* shape = nullptr; + size_t* shapeLength = 0; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetShape(tensorDesc, &shape, shapeLength); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_Getshape_003 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_Getshape_003, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + int32_t* shape = nullptr; + int lengthValue = 1; + size_t* shapeLength = new size_t(lengthValue); + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetShape(tensorDesc, &shape, shapeLength); + delete shapeLength; + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + +/* + * @tc.name: nnt_tensordesc_setformat_001 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setformat_001, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = nullptr; + OH_NN_Format format = static_cast(OH_NN_FLOAT32); + OH_NN_ReturnCode ret = OH_NNTensorDesc_SetFormat(tensorDesc, format); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_setformat_002 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setformat_002, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + OH_NN_Format format = static_cast(OH_NN_FLOAT32); + OH_NN_ReturnCode ret = OH_NNTensorDesc_SetFormat(tensorDesc, format); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_getformat_001 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getformat_001, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = nullptr; + OH_NN_Format* format = nullptr; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetFormat(tensorDesc, format); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_getformat_002 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getformat_002, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + OH_NN_Format* format = nullptr; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetFormat(tensorDesc, format); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_getformat_003 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getformat_003, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + OH_NN_Format format = OH_NN_FORMAT_NONE; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetFormat(tensorDesc, &format); + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + +/* + * @tc.name: nnt_tensordesc_getelementcount_001 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getelementcount_001, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = nullptr; + size_t* elementCount = nullptr; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetElementCount(tensorDesc, elementCount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_getelementcount_002 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getelementcount_002, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t* elementCount = nullptr; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetElementCount(tensorDesc, elementCount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_getelementcount_003 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getelementcount_003, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t elementCount = 0; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetElementCount(tensorDesc, &elementCount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_getelementcount_001 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getbytesize_001, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = nullptr; + size_t* byteSize = nullptr; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetByteSize(tensorDesc, byteSize); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_getelementcount_002 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getbytesize_002, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t* byteSize = nullptr; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetByteSize(tensorDesc, byteSize); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_tensordesc_getelementcount_003 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getbytesize_003, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t byteSize = 0; + OH_NN_ReturnCode ret = OH_NNTensorDesc_GetByteSize(tensorDesc, &byteSize); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nntensor_create_001 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_create_001, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = nullptr; + size_t deviceid = 0; + NN_Tensor* ret = OH_NNTensor_Create(deviceid, tensorDesc); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_create_002 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_create_002, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t deviceid = 1; + NN_Tensor* ret = OH_NNTensor_Create(deviceid, tensorDesc); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_createwithsize_001 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithsize_001, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = nullptr; + size_t deviceid = 0; + size_t size = 0; + NN_Tensor* ret = OH_NNTensor_CreateWithSize(deviceid, tensorDesc, size); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_createwithsize_002 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithsize_002, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t deviceid = 0; + size_t size = 0; + NN_Tensor* ret = OH_NNTensor_CreateWithSize(deviceid, tensorDesc, size); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_createwithsize_001 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_001, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = nullptr; + size_t deviceid = 0; + int fd = 0; + size_t size = 0; + size_t offset = 0; + NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_createwithsize_002 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_002, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t deviceid = 0; + int fd = -1; + size_t size = 0; + size_t offset = 0; + NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_createwithsize_003 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_003, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t deviceid = 0; + int fd = -1; + size_t size = 0; + size_t offset = 0; + NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_createwithsize_004 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_004, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t deviceid = 0; + int fd = 1; + size_t size = 1; + size_t offset = 2; + NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_createwithsize_005 + * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_005, testing::ext::TestSize.Level0) +{ + NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); + size_t deviceid = 0; + int fd = 1; + size_t size = 1; + size_t offset = 0; + NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_destroy_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_destroy_001, testing::ext::TestSize.Level0) +{ + NN_Tensor* tensor = nullptr; + OH_NN_ReturnCode ret = OH_NNTensor_Destroy(&tensor); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nntensor_destroy_00 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_destroy_002, testing::ext::TestSize.Level0) +{ + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + NN_Tensor* tensor = reinterpret_cast(hdiDevice->CreateTensor(tensorDesc)); + OH_NN_ReturnCode ret = OH_NNTensor_Destroy(&tensor); + EXPECT_EQ(OH_NN_NULL_PTR, ret); +} + +/* + * @tc.name: nnt_nntensor_gettensordesc_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_gettensordesc_001, testing::ext::TestSize.Level0) +{ + const NN_Tensor* tensor = nullptr; + NN_TensorDesc* ret = OH_NNTensor_GetTensorDesc(tensor); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_gettensordesc_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_gettensordesc_002, testing::ext::TestSize.Level0) +{ + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + NN_Tensor* tensor = reinterpret_cast(hdiDevice->CreateTensor(tensorDesc)); + NN_TensorDesc* ret = OH_NNTensor_GetTensorDesc(tensor); + EXPECT_NE(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_getdatabuffer_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getdatabuffer_001, testing::ext::TestSize.Level0) +{ + const NN_Tensor* tensor = nullptr; + void* ret = OH_NNTensor_GetDataBuffer(tensor); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_getdatabuffer_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getdatabuffer_002, testing::ext::TestSize.Level0) +{ + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + NN_Tensor* tensor = reinterpret_cast(hdiDevice->CreateTensor(tensorDesc)); + void* ret = OH_NNTensor_GetDataBuffer(tensor); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nntensor_getsize_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getsize_001, testing::ext::TestSize.Level0) +{ + const NN_Tensor* tensor = nullptr; + size_t* size = nullptr; + OH_NN_ReturnCode ret = OH_NNTensor_GetSize(tensor, size); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nntensor_getsize_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getsize_002, testing::ext::TestSize.Level0) +{ + size_t* size = nullptr; + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + NN_Tensor* tensor = reinterpret_cast(hdiDevice->CreateTensor(tensorDesc)); + OH_NN_ReturnCode ret = OH_NNTensor_GetSize(tensor, size); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nntensor_getsize_003 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getsize_003, testing::ext::TestSize.Level0) +{ + size_t size = 1; + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + NN_Tensor* tensor = reinterpret_cast(hdiDevice->CreateTensor(tensorDesc)); + OH_NN_ReturnCode ret = OH_NNTensor_GetSize(tensor, &size); + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + +/* + * @tc.name: nnt_nntensor_getfd_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getfd_001, testing::ext::TestSize.Level0) +{ + const NN_Tensor* tensor = nullptr; + int* fd = nullptr; + OH_NN_ReturnCode ret = OH_NNTensor_GetFd(tensor, fd); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nntensor_getfd_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getfd_002, testing::ext::TestSize.Level0) +{ + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + NN_Tensor* tensor = reinterpret_cast(hdiDevice->CreateTensor(tensorDesc)); + int* fd = nullptr; + OH_NN_ReturnCode ret = OH_NNTensor_GetFd(tensor, fd); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nntensor_getfd_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getfd_003, testing::ext::TestSize.Level0) +{ + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + NN_Tensor* tensor = reinterpret_cast(hdiDevice->CreateTensor(tensorDesc)); + int fd = 1; + OH_NN_ReturnCode ret = OH_NNTensor_GetFd(tensor, &fd); + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + +/* + * @tc.name: nnt_nntensor_getoffset_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getoffset_001, testing::ext::TestSize.Level0) +{ + const NN_Tensor* tensor = nullptr; + size_t* offset = nullptr; + OH_NN_ReturnCode ret = OH_NNTensor_GetOffset(tensor, offset); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nntensor_getoffset_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getoffset_002, testing::ext::TestSize.Level0) +{ + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + NN_Tensor* tensor = reinterpret_cast(hdiDevice->CreateTensor(tensorDesc)); + size_t* offset = nullptr; + OH_NN_ReturnCode ret = OH_NNTensor_GetOffset(tensor, offset); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nntensor_getoffset_003 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getoffset_003, testing::ext::TestSize.Level0) +{ + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + NN_Tensor* tensor = reinterpret_cast(hdiDevice->CreateTensor(tensorDesc)); + size_t offset = 1; + OH_NN_ReturnCode ret = OH_NNTensor_GetOffset(tensor, &offset); + EXPECT_EQ(OH_NN_SUCCESS, ret); +} + +/* + * @tc.name: nnt_nnexecutor_getputputshape_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getputputshape_001, testing::ext::TestSize.Level0) +{ + OH_NNExecutor* executor = nullptr; + uint32_t outputIndex = 0; + int32_t* shape = nullptr; + uint32_t* shapeLength = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_GetOutputShape(executor, outputIndex, &shape, shapeLength); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_getputputshape_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getputputshape_002, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + uint32_t outputIndex = 0; + int32_t* shape = nullptr; + uint32_t* shapeLength = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, &shape, shapeLength); + delete executor; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_getputputshape_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputcount_001, testing::ext::TestSize.Level0) +{ + const OH_NNExecutor* executor = nullptr; + size_t* inputCount = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_GetInputCount(executor, inputCount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_getinputcount_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputcount_002, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + size_t* inputCount = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_GetInputCount(nnExecutor, inputCount); + delete executor; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_getoutputcount_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getoutputcount_001, testing::ext::TestSize.Level0) +{ + const OH_NNExecutor* executor = nullptr; + size_t* outputCount = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_GetOutputCount(executor, outputCount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_getoutputcount_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getoutputcount_002, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + size_t* outputCount = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_GetOutputCount(nnExecutor, outputCount); + delete executor; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_createinputtensordesc_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_createinputtensordesc_001, testing::ext::TestSize.Level0) +{ + const OH_NNExecutor* executor = nullptr; + size_t index = 1; + NN_TensorDesc* ret = OH_NNExecutor_CreateInputTensorDesc(executor, index); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nnexecutor_createinputtensordesc_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_createouttensordesc_001, testing::ext::TestSize.Level0) +{ + const OH_NNExecutor* executor = nullptr; + size_t index = 1; + NN_TensorDesc* ret = OH_NNExecutor_CreateOutputTensorDesc(executor, index); + EXPECT_EQ(nullptr, ret); +} + +/* + * @tc.name: nnt_nnexecutor_getoutputcount_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_001, testing::ext::TestSize.Level0) +{ + const OH_NNExecutor* executor = nullptr; + size_t index = 1; + size_t* minInputDims = nullptr; + size_t* maxInputDims = nullptr; + size_t* shapeLength = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(executor, index, &minInputDims, &maxInputDims, shapeLength); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_getoutputcount_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ + HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_002, testing::ext::TestSize.Level0) + { + size_t index = 1; + size_t* minInputDims = nullptr; + size_t* maxInputDims = nullptr; + size_t* shapeLength = nullptr; + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, &minInputDims, &maxInputDims, shapeLength); + delete executor; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); + } + + /* + * @tc.name: nnt_nnexecutor_getinputdimRange_003 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ + HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_003, testing::ext::TestSize.Level0) + { + size_t index = 1; + size_t mindims = 1; + size_t* minInputDims = &mindims; + size_t* maxInputDims = nullptr; + size_t* shapeLength = nullptr; + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, &minInputDims, &maxInputDims, shapeLength); + delete executor; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); + } + +/* + * @tc.name: nnt_nnexecutor_setonrundone_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_setonrundone_001, testing::ext::TestSize.Level0) +{ + OH_NNExecutor* executor = nullptr; + NN_OnRunDone rundone = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_SetOnRunDone(executor, rundone); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_setonrundone_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_setonrundone_002, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_OnRunDone rundone = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_SetOnRunDone(nnExecutor, rundone); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_setonservicedied_001 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_setonservicedied_001, testing::ext::TestSize.Level0) +{ + OH_NNExecutor* executor = nullptr; + NN_OnServiceDied servicedied = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_SetOnServiceDied(executor, servicedied); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_nnexecutor_setonservicedied_002 + * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_setonservicedied_002, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_OnServiceDied servicedied = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_SetOnServiceDied(nnExecutor, servicedied); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runsync_001 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_001, testing::ext::TestSize.Level0) +{ + OH_NNExecutor* executor = nullptr; + NN_Tensor* inputTensor[] = {nullptr}; + size_t inputCount = 0; + NN_Tensor* outputTensor[] = {nullptr}; + size_t outputcount = 0; + OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(executor, inputTensor, inputCount, outputTensor, outputcount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runsync_002 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_002, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[] = {nullptr}; + size_t inputCount = 0; + NN_Tensor* outputTensor[] = {nullptr}; + size_t outputcount = 0; + OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runsync_003 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_003, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[sizetensor]; + size_t inputCount = 0; + NN_Tensor* outputTensor[] = {nullptr}; + size_t outputcount = 0; + OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runsync_004 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_004, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[sizetensor]; + size_t inputCount = 1; + NN_Tensor* outputTensor[] = {nullptr}; + size_t outputcount = 0; + OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runsync_005 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_005, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[sizetensor]; + size_t inputCount = 1; + NN_Tensor* outputTensor[sizetensor]; + size_t outputcount = 0; + OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runasync_001 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_001, testing::ext::TestSize.Level0) +{ + OH_NNExecutor* executor = nullptr; + NN_Tensor* inputTensor[] = {nullptr}; + size_t inputCount = 0; + NN_Tensor* outputTensor[] = {nullptr}; + size_t outputcount = 0; + int32_t timeout = 1; + void* userdata = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(executor, inputTensor, inputCount, outputTensor, outputcount, + timeout, userdata); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runasync_002 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. + * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_002, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[] = {nullptr}; + size_t inputCount = 0; + NN_Tensor* outputTensor[] = {nullptr}; + size_t outputcount = 0; + int32_t timeout = 1; + void* userdata = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount, + timeout, userdata); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runasync_003 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_003, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[sizetensor]; + size_t inputCount = 0; + NN_Tensor* outputTensor[] = {nullptr}; + size_t outputcount = 0; + int32_t timeout = 1; + void* userdata = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount, + timeout, userdata); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runasync_004 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_004, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[sizetensor]; + size_t inputCount = 0; + NN_Tensor* outputTensor[] = {nullptr}; + size_t outputcount = 0; + int32_t timeout = 1; + void* userdata = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount, + timeout, userdata); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runasync_005 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_005, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[sizetensor]; + size_t inputCount = 1; + NN_Tensor* outputTensor[] = {nullptr}; + size_t outputcount = 0; + int32_t timeout = 1; + void* userdata = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount, + timeout, userdata); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runasync_006 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_006, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[sizetensor]; + size_t inputCount = 1; + NN_Tensor* outputTensor[sizetensor]; + size_t outputcount = 0; + int32_t timeout = 1; + void* userdata = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount, + timeout, userdata); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} + +/* + * @tc.name: nnt_executor_runasync_007 + * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function. 
+ * @tc.type: FUNC + */ +HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_007, testing::ext::TestSize.Level0) +{ + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + NN_Tensor* inputTensor[sizetensor]; + size_t inputCount = 1; + NN_Tensor* outputTensor[sizetensor]; + size_t outputcount = 1; + int32_t timeout = 1; + void* userdata = nullptr; + OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount, + timeout, userdata); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +} } // Unittest } // namespace NeuralNetworkRuntime } // namespace OHOS \ No newline at end of file diff --git a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.h b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.h index dc64937..4c8d864 100644 --- a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.h +++ b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.h @@ -40,7 +40,13 @@ public: OH_NN_UInt32Array m_outputIndices; OH_NN_UInt32Array m_paramIndices; OH_NN_Tensor m_tensor; + TensorDesc desc; + TensorDesc* tensorDesc = &desc; + Compilation ation; + Compilation* compilation = &ation; + size_t backendID = 1; + size_t sizetensor = 3; uint32_t m_inputIndexs[2] {0, 1}; uint32_t m_outputIndexs[1] {2}; uint32_t m_paramIndexs[1] {3}; diff --git a/test/unittest/components/v2_0/device_registrar/device_registrar_test.cpp b/test/unittest/components/v2_0/device_registrar/device_registrar_test.cpp index 9988bea..afda8de 100644 --- 
a/test/unittest/components/v2_0/device_registrar/device_registrar_test.cpp +++ b/test/unittest/components/v2_0/device_registrar/device_registrar_test.cpp @@ -192,22 +192,22 @@ sptr IRegisterDevice::Get(const std::string& serviceName, bool return nullptr; } - sptr mockIDevice = sptr(new (std::nothrow) MockIDeviceImp()); - if (mockIDevice.GetRefPtr() == nullptr) { + auto mockIDevice = std::make_shared(); + if (!mockIDevice) { LOGE("Failed to new MockIDeviceImp object."); return nullptr; } std::string deviceName = "MockIDeviceA"; - EXPECT_CALL(*((MockIDeviceImp *)mockIDevice.GetRefPtr()), GetDeviceName(::testing::_)) + EXPECT_CALL(*mockIDevice, GetDeviceName(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(HDF_SUCCESS))); std::string vendorName = "MockVendorA"; - EXPECT_CALL(*((MockIDeviceImp *)mockIDevice.GetRefPtr()), GetVendorName(::testing::_)) + EXPECT_CALL(*mockIDevice, GetVendorName(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(vendorName), ::testing::Return(HDF_SUCCESS))); V2_0::DeviceStatus deviceStatus = V2_0::DeviceStatus::AVAILABLE; - EXPECT_CALL(*((MockIDeviceImp *)mockIDevice.GetRefPtr()), GetDeviceStatus(::testing::_)) + EXPECT_CALL(*mockIDevice, GetDeviceStatus(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceStatus), ::testing::Return(HDF_SUCCESS))); return mockIDevice; } -- Gitee From 27e8b57686ed6985e94e9f432fd0971615fb040c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E5=8F=B6=E5=86=9B?= Date: Fri, 5 Jul 2024 14:53:36 +0800 Subject: [PATCH 2/6] =?UTF-8?q?UT=E7=94=A8=E4=BE=8B=E4=BF=AE=E5=A4=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: 王叶军 --- test/unittest/components/BUILD.gn | 42 +++++++++---------- .../v1_0/inner_model/nn_tensor_desc_test.cpp | 2 +- .../neural_network_core_test.cpp | 10 +++-- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git 
a/test/unittest/components/BUILD.gn b/test/unittest/components/BUILD.gn index 375f56e..9e3ac94 100644 --- a/test/unittest/components/BUILD.gn +++ b/test/unittest/components/BUILD.gn @@ -213,10 +213,10 @@ ohos_unittest("NeuralNetworkCoreV1_0Test") { ] } -ohos_unittest("NnTensorDescV1_0Test") { +ohos_unittest("QuantParamsTest") { module_out_path = module_output_path - sources = [ "./v1_0/inner_model/nn_tensor_desc_test.cpp" ] + sources = [ "./quant_param/quant_param_test.cpp" ] configs = [ ":module_private_config" ] deps = [ @@ -227,18 +227,16 @@ ohos_unittest("NnTensorDescV1_0Test") { ] external_deps = [ - "c_utils:utils", "drivers_interface_nnrt:libnnrt_proxy_1.0", - "hdf_core:libhdf_utils", "hilog:libhilog", "hitrace:libhitracechain", ] } -ohos_unittest("QuantParamsTest") { +ohos_unittest("NNBackendTest") { module_out_path = module_output_path - sources = [ "./quant_param/quant_param_test.cpp" ] + sources = [ "./nn_backend/nn_backend_test.cpp" ] configs = [ ":module_private_config" ] deps = [ @@ -255,10 +253,10 @@ ohos_unittest("QuantParamsTest") { ] } -ohos_unittest("NNBackendTest") { +ohos_unittest("NNCompiledCacheTest") { module_out_path = module_output_path - sources = [ "./nn_backend/nn_backend_test.cpp" ] + sources = [ "./nn_compiled_cache/nn_compiled_cache_test.cpp" ] configs = [ ":module_private_config" ] deps = [ @@ -275,10 +273,10 @@ ohos_unittest("NNBackendTest") { ] } -ohos_unittest("NNCompiledCacheTest") { +ohos_unittest("NNCompilerTest") { module_out_path = module_output_path - sources = [ "./nn_compiled_cache/nn_compiled_cache_test.cpp" ] + sources = [ "./nn_compiler/nn_compiler_test.cpp" ] configs = [ ":module_private_config" ] deps = [ @@ -295,10 +293,10 @@ ohos_unittest("NNCompiledCacheTest") { ] } -ohos_unittest("NNCompilerTest") { +ohos_unittest("TransformV1_0Test") { module_out_path = module_output_path - sources = [ "./nn_compiler/nn_compiler_test.cpp" ] + sources = [ "./v1_0/transform/transform_test.cpp" ] configs = [ 
":module_private_config" ] deps = [ @@ -312,13 +310,15 @@ ohos_unittest("NNCompilerTest") { "drivers_interface_nnrt:libnnrt_proxy_1.0", "hilog:libhilog", "hitrace:libhitracechain", + "mindspore:mindir", ] } -ohos_unittest("TransformV1_0Test") { +ohos_unittest("InnerModelV1_0Test") { module_out_path = module_output_path - sources = [ "./v1_0/transform/transform_test.cpp" ] + sources = [ "./v1_0/inner_model/inner_model_test.cpp" ] + sources += [ "../common/v1_0/inner_model_mock_device.cpp" ] configs = [ ":module_private_config" ] deps = [ @@ -329,18 +329,19 @@ ohos_unittest("TransformV1_0Test") { ] external_deps = [ + "c_utils:utils", "drivers_interface_nnrt:libnnrt_proxy_1.0", + "hdf_core:libhdf_utils", "hilog:libhilog", "hitrace:libhitracechain", "mindspore:mindir", ] } -ohos_unittest("InnerModelV1_0Test") { +ohos_unittest("NnTensorV1_0Test") { module_out_path = module_output_path - sources = [ "./v1_0/inner_model/inner_model_test.cpp" ] - sources += [ "../common/v1_0/inner_model_mock_device.cpp" ] + sources = [ "./v1_0/inner_model/nn_tensor_test.cpp" ] configs = [ ":module_private_config" ] deps = [ @@ -360,10 +361,10 @@ ohos_unittest("InnerModelV1_0Test") { ] } -ohos_unittest("NnTensorV1_0Test") { +ohos_unittest("NnTensorDescV1_0Test") { module_out_path = module_output_path - sources = [ "./v1_0/inner_model/nn_tensor_test.cpp" ] + sources = [ "./v1_0/inner_model/nn_tensor_desc_test.cpp" ] configs = [ ":module_private_config" ] deps = [ @@ -379,7 +380,6 @@ ohos_unittest("NnTensorV1_0Test") { "hdf_core:libhdf_utils", "hilog:libhilog", "hitrace:libhitracechain", - "mindspore:mindir", ] } @@ -778,8 +778,6 @@ group("components_unittest") { ":NNCompilerTest", ":NeuralNetworkCoreV1_0Test", ":NnTensorDescV1_0Test", - ":NeuralNetworkRuntimeV1_0Test", - ":NeuralNetworkRuntimeV2_0Test", ":NnTensorV1_0Test", ":NnTensorV2_0Test", ":NnValidationV1_0Test", diff --git a/test/unittest/components/v1_0/inner_model/nn_tensor_desc_test.cpp 
b/test/unittest/components/v1_0/inner_model/nn_tensor_desc_test.cpp index e2c2c12..f316e45 100644 --- a/test/unittest/components/v1_0/inner_model/nn_tensor_desc_test.cpp +++ b/test/unittest/components/v1_0/inner_model/nn_tensor_desc_test.cpp @@ -152,7 +152,7 @@ HWTEST_F(NnTensorDescTest, nn_set_shape_001, TestSize.Level1) HWTEST_F(NnTensorDescTest, nn_set_shape_002, TestSize.Level1) { TensorDesc tensordesc; - const int32_t testShape[] = {2, 3, 5}; + const int32_t testShape[] = { 2, 3, 5 }; size_t shapenum = 0; EXPECT_EQ(OH_NN_INVALID_PARAMETER, tensordesc.SetShape(testShape, shapenum)); } diff --git a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp index aa26cde..3ab17f1 100644 --- a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp +++ b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp @@ -1685,7 +1685,7 @@ HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_001, testing::ex * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
* @tc.type: FUNC */ - HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_002, testing::ext::TestSize.Level0) +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_002, testing::ext::TestSize.Level0) { size_t index = 1; size_t* minInputDims = nullptr; @@ -1700,7 +1700,8 @@ HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_001, testing::ex m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); - OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, &minInputDims, &maxInputDims, shapeLength); + OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, + &minInputDims, &maxInputDims, shapeLength); delete executor; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } @@ -1710,7 +1711,7 @@ HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_001, testing::ex * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function. 
* @tc.type: FUNC */ - HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_003, testing::ext::TestSize.Level0) +HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_003, testing::ext::TestSize.Level0) { size_t index = 1; size_t mindims = 1; @@ -1726,7 +1727,8 @@ HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_001, testing::ex m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); - OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, &minInputDims, &maxInputDims, shapeLength); + OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, + &minInputDims, &maxInputDims, shapeLength); delete executor; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } -- Gitee From db3aff03094e602aeb42db832c4f2e92fa215c3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E5=8F=B6=E5=86=9B?= Date: Fri, 5 Jul 2024 14:59:37 +0800 Subject: [PATCH 3/6] =?UTF-8?q?UT=E7=94=A8=E4=BE=8B=E4=BF=AE=E5=A4=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: 王叶军 --- test/unittest/components/BUILD.gn | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/unittest/components/BUILD.gn b/test/unittest/components/BUILD.gn index 9e3ac94..2c680c7 100644 --- a/test/unittest/components/BUILD.gn +++ b/test/unittest/components/BUILD.gn @@ -777,6 +777,8 @@ group("components_unittest") { ":NNCompiledCacheTest", ":NNCompilerTest", ":NeuralNetworkCoreV1_0Test", + ":NeuralNetworkRuntimeV1_0Test", + ":NeuralNetworkRuntimeV2_0Test", ":NnTensorDescV1_0Test", ":NnTensorV1_0Test", ":NnTensorV2_0Test", -- Gitee From d66ca3c5f7bda997f7ab4073540f1c553385726e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E5=8F=B6=E5=86=9B?= Date: Fri, 5 Jul 2024 15:12:37 +0800 Subject: [PATCH 4/6] =?UTF-8?q?UT=E7=94=A8=E4=BE=8B=E4=BF=AE=E5=A4=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 
8bit Signed-off-by: 王叶军 --- .../neural_network_core_test.cpp | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp index 3ab17f1..7a42549 100644 --- a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp +++ b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp @@ -1686,24 +1686,24 @@ HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_001, testing::ex * @tc.type: FUNC */ HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_002, testing::ext::TestSize.Level0) - { - size_t index = 1; - size_t* minInputDims = nullptr; - size_t* maxInputDims = nullptr; - size_t* shapeLength = nullptr; - size_t m_backendID {0}; - std::shared_ptr m_preparedModel {nullptr}; - std::vector, OH_NN_TensorType>> m_inputTensorDescs; - std::vector, OH_NN_TensorType>> m_outputTensorDescs; - std::shared_ptr device = std::make_shared(); - NNExecutor* executor = new (std::nothrow) NNExecutor( - m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); +{ + size_t index = 1; + size_t* minInputDims = nullptr; + size_t* maxInputDims = nullptr; + size_t* shapeLength = nullptr; + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); - OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); - OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, - &minInputDims, &maxInputDims, shapeLength); - delete executor; - EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); + OH_NNExecutor* 
nnExecutor = reinterpret_cast(&executor); + OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, + &minInputDims, &maxInputDims, shapeLength); + delete executor; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } /* -- Gitee From 0a77f9e7e89c7965f362ae15783a53e8a7dad2d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E5=8F=B6=E5=86=9B?= Date: Fri, 5 Jul 2024 15:27:05 +0800 Subject: [PATCH 5/6] =?UTF-8?q?UT=E7=94=A8=E4=BE=8B=E4=BF=AE=E5=A4=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: 王叶军 --- .../neural_network_core_test.cpp | 40 +++++++++---------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp index 7a42549..3fe40ca 100644 --- a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp +++ b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp @@ -1704,7 +1704,7 @@ HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_002, testing::ex &minInputDims, &maxInputDims, shapeLength); delete executor; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); - } +} /* * @tc.name: nnt_nnexecutor_getinputdimRange_003 @@ -1712,25 +1712,25 @@ HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_002, testing::ex * @tc.type: FUNC */ HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_003, testing::ext::TestSize.Level0) - { - size_t index = 1; - size_t mindims = 1; - size_t* minInputDims = &mindims; - size_t* maxInputDims = nullptr; - size_t* shapeLength = nullptr; - size_t m_backendID {0}; - std::shared_ptr m_preparedModel {nullptr}; - std::vector, OH_NN_TensorType>> m_inputTensorDescs; - std::vector, OH_NN_TensorType>> m_outputTensorDescs; - std::shared_ptr device = std::make_shared(); - NNExecutor* executor = new (std::nothrow) NNExecutor( - 
m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); - - OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); - OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, - &minInputDims, &maxInputDims, shapeLength); - delete executor; - EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); +{ + size_t index = 1; + size_t mindims = 1; + size_t* minInputDims = &mindims; + size_t* maxInputDims = nullptr; + size_t* shapeLength = nullptr; + size_t m_backendID {0}; + std::shared_ptr m_preparedModel {nullptr}; + std::vector, OH_NN_TensorType>> m_inputTensorDescs; + std::vector, OH_NN_TensorType>> m_outputTensorDescs; + std::shared_ptr device = std::make_shared(); + NNExecutor* executor = new (std::nothrow) NNExecutor( + m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs); + + OH_NNExecutor* nnExecutor = reinterpret_cast(&executor); + OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index, + &minInputDims, &maxInputDims, shapeLength); + delete executor; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); } /* -- Gitee From ac19e34062f372b7e91c7bced8139da12f7668bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E5=8F=B6=E5=86=9B?= Date: Fri, 5 Jul 2024 15:39:09 +0800 Subject: [PATCH 6/6] =?UTF-8?q?UT=E7=94=A8=E4=BE=8B=E4=BF=AE=E5=A4=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: 王叶军 --- .../v1_0/neural_network_core_test/neural_network_core_test.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp index 3fe40ca..888b38f 100644 --- a/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp +++ b/test/unittest/components/v1_0/neural_network_core_test/neural_network_core_test.cpp @@ -1731,7 +1731,7 @@ HWTEST_F(NeuralNetworkCoreTest, 
nnt_nnexecutor_getinputdimRange_003, testing::ex &minInputDims, &maxInputDims, shapeLength); delete executor; EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret); - } +} /* * @tc.name: nnt_nnexecutor_setonrundone_001 -- Gitee