diff --git a/test/fuzztest/iamgecompose_fuzzer/BUILD.gn b/test/fuzztest/iamgecompose_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..fe1cf433d66a4fbe9d49d15b516d9877aebaf947 --- /dev/null +++ b/test/fuzztest/iamgecompose_fuzzer/BUILD.gn @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("ImagecomposeFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/iamgecompose_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ + "./" + ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "iamgecompose_fuzzer.cpp" + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.cpp b/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..02da5e8aa44ea03db40d16a619c71ff2bfb255b7 --- /dev/null +++ b/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.cpp @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "iamgecompose_fuzzer.h" +#include "image_processing.h" +#include "image/pixelmap_native.h" +#include "native_color_space_manager.h" +#include <stddef.h> +#include <stdint.h> + +namespace OHOS { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; + +static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format, + OH_NativeColorSpaceManager *colorSpaceNative) +{ + OH_Pixelmap_InitializationOptions *options = nullptr; + (void)OH_PixelmapInitializationOptions_Create(&options); + (void)OH_PixelmapInitializationOptions_SetWidth(options, width); + (void)OH_PixelmapInitializationOptions_SetHeight(options, height); + (void)OH_PixelmapInitializationOptions_SetPixelFormat(options, format); + (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap); + (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative); +} + +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + OH_ImageProcessing* imageProcessor = nullptr; + OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + + OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(SRGB); + OH_NativeColorSpaceManager *dstColorSpace = OH_NativeColorSpaceManager_CreateForName(BT2020_PQ); + OH_PixelmapNative *srcPic = nullptr; + OH_PixelmapNative *srcGainmap = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&srcPic, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace); + CreatePixelmap(&srcGainmap, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, srcColorSpace); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102, dstColorSpace); + OH_PixelmapNative_WritePixels(srcPic, const_cast<uint8_t *>(data), size); + OH_PixelmapNative_WritePixels(srcGainmap, const_cast<uint8_t *>(data), size); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Compose(imageProcessor, srcPic, srcGainmap, dst); + OH_PixelmapNative_Release(srcPic); + OH_PixelmapNative_Release(srcGainmap); + OH_PixelmapNative_Release(dst); + OH_NativeColorSpaceManager_Destroy(srcColorSpace); + OH_NativeColorSpaceManager_Destroy(dstColorSpace); + OH_ImageProcessing_Destroy(imageProcessor); + return ret == IMAGE_PROCESSING_SUCCESS; +} +} // namespace OHOS + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.h b/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..be69786f3906117bc062f4651be59d970bb39d02 --- /dev/null +++ b/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "iamgecompose_fuzzer" \ No newline at end of file diff --git a/test/fuzztest/iamgecompose_fuzzer/project.xml b/test/fuzztest/iamgecompose_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/iamgecompose_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/BUILD.gn b/test/fuzztest/iamgesdr2hdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..2604010322da85f0b0843eaa3b80617ccda17eb0 --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/BUILD.gn @@ -0,0 +1,48 @@ +# Copyright (c) 2025 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#####################hydra-fuzz################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +##############################fuzztest########################################## +ohos_fuzztest("Imagesdr2hdrFuzzTest") { + module_out_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagesdr2hdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagesdr2hdr_fuzzer.cpp", + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixcelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/corpus/init b/test/fuzztest/iamgesdr2hdr_fuzzer/corpus/init new file mode 100644 index 0000000000000000000000000000000000000000..8a4c0f702f037977084c459e8700bd657e860de4 --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/corpus/init @@ -0,0 +1,15 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +FUZZ \ No newline at end of file diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.cpp b/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..59bfd9fd3a4007846f12c2baa0a5494b8db701eb --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.cpp @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "imagesdr2hdr_fuzzer.h" +#include "image_processing.h" +#include "image/pixelmap_native.h" +#include "native_color_space_manager.h" +#include "stddef.h" +#include "stdint.h" + +using namespace OHOS; +using namespace OHOS::Media; +using namespace OHOS::MediaAVCodec; +using namespace OHOS::MediaAVCodec::Codec; +using namespace std; +namespace { + +} // namespace + +void VDecServerSample::CallBack::OnError(AVCodecErrorType errorType, int32_t errorCode) +{ + cout << "--OnError--" << endl; + tester->isRunning_.store(false); + tester->signal_->inCond_.notify_all(); +} + +void VDecServerSample::CallBack::OnOutputFormatChanged(const Format &format) +{ + tester->GetOutputFormat(); +} + +void VDecServerSample::CallBack::OnInputBufferAvailable(uint32_t index, std::shared_ptr buffer) +{ + unique_lock lock(tester->signal_->inMutex_); + tester->signal_->inIdxQueue_.push(index); + tester->signal_->inBufferQueue_.push(buffer); + tester->signal_->inCond_.notify_all(); +} + +void VDecServerSample::CallBack::OnOutputBufferAvailable(uint32_t index, std::shared_ptr buffer) +{ + tester->codec_->ReleaseOutputBuffer(index); +} + +VDecServerSample::~VDecServerSample() +{ + if (codec_ != nullptr) { + codec_->Stop(); + codec_->Release(); + } + if (signal_ != nullptr) { + delete signal_; + signal_ = nullptr; + } +} + +int32_t VDecServerSample::ConfigServerDecoder() +{ + Format fmt; + fmt.PutIntValue(MediaDescriptionKey::MD_KEY_WIDTH, width); + fmt.PutIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height); + fmt.PutIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, 1); + fmt.PutDoubleValue(MediaDescriptionKey::MD_KEY_FRAME_RATE, frameRate); + fmt.PutIntValue(MediaDescriptionKey::MD_KEY_ROTATION_ANGLE, 0); + return codec_->Configure(fmt); +} + +int32_t VDecServerSample::SetCallback() +{ + shared_ptr cb = make_shared(this); + return codec_->SetCallback(cb); +} + +void VDecServerSample::RunVideoServerDecoder() +{ + codec_ = make_shared("OH.Media.Codec.Decoder.Video.AVC"); + if (codec_ == nullptr) { + cout << "Create failed" << endl; + return; + } + int32_t err = ConfigServerDecoder(); + if (err != AVCS_ERR_OK) { + cout << "ConfigServerDecoder failed" << endl; + return; + } + signal_ = new VDecSignal(); + if (signal_ == nullptr) { + cout << "Failed to new VDecSignal" << endl; + return; + } + err = SetCallback(); + if (err != AVCS_ERR_OK) { + cout << "SetCallback failed" << endl; + return; + } + err = codec_->Start(); + if (err != AVCS_ERR_OK) { + cout << "Start failed" << endl; + return; + } + isRunning_.store(true); + inputLoop_ = 
make_unique(&VDecServerSample::InputFunc, this); + if (inputLoop_ == nullptr) { + cout << "Failed to create input loop" << endl; + isRunning_.store(false); + } +} + +void VDecServerSample::InputFunc() +{ + int32_t time = 1000; + while (sendFrameIndex < frameIndex) { + if (!isRunning_.load()) { + break; + } + unique_lock lock(signal_->inMutex_); + signal_->inCond_.wait_for(lock, std::chrono::milliseconds(time), [this]() { + if (!isRunning_.load()) { + cout << "quit signal" << endl; + return true; + } + return signal_->inIdxQueue_.size() > 0; + }); + if (!isRunning_.load() || signal_->inIdxQueue_.size() == 0) { + break; + } + uint32_t index = signal_->inIdxQueue_.front(); + auto buffer = signal_->inBufferQueue_.front(); + signal_->inIdxQueue_.pop(); + signal_->inBufferQueue_.pop(); + lock.unlock(); + if (buffer->memory_ == nullptr) { + isRunning_.store(false); + break; + } + uint8_t *bufferAddr = buffer->memory_->GetAddr(); + if (memcpy_s(bufferAddr, buffer->memory_->GetCapacity(), fuzzData, fuzzSize) != EOK) { + break; + } + int32_t err = codec_->QueueInputBuffer(index); + if (err != AVCS_ERR_OK) { + cout << "QueueInputBuffer fail" << endl; + break; + } + sendFrameIndex++; + } +} + +void VDecServerSample::WaitForEos() +{ + if (inputLoop_ && inputLoop_->joinable()) { + inputLoop_->join(); + } +} + +void VDecServerSample::GetOutputFormat() +{ + Format fmt; + int32_t err = codec_->GetOutputFormat(fmt); + if (err != AVCS_ERR_OK) { + cout << "GetOutputFormat fail" << endl; + isRunning_.store(false); + signal_->inCond_.notify_all(); + } +} + +void VDecServerSample::Flush() +{ + int32_t err = codec_->Flush(); + if (err != AVCS_ERR_OK) { + cout << "Flush fail" << endl; + isRunning_.store(false); + signal_->inCond_.notify_all(); + } +} + +void VDecServerSample::Reset() +{ + int32_t err = codec_->Reset(); + if (err != AVCS_ERR_OK) { + cout << "Reset fail" << endl; + isRunning_.store(false); + signal_->inCond_.notify_all(); + } +} \ No newline at end of file diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.h b/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..d7789bbfb3543213a8f0e2eaa188aa031cbe53c2 --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "imagesdr2hdr_fuzzer" diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/project.xml b/test/fuzztest/iamgesdr2hdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..66e1dcac475475fb101b6f8670ec699e6e9696aa --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/imagedecompose_fuzzer/BUILD.gn b/test/fuzztest/imagedecompose_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..fe7a09eb1ca4cf18106a3ec1046e61b6df5c5133 --- /dev/null +++ b/test/fuzztest/imagedecompose_fuzzer/BUILD.gn @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("ImagedecomposeFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagedecompose_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagedecompose_fuzzer.cpp" + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.cpp b/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b927760d9b51e6da79d73741e98a1ed535d923c2 --- /dev/null +++ b/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.cpp @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "imagedecompose_fuzzer.h" +#include "image_processing.h" +#include "image/pixelmap_native.h" +#include "native_color_space_manager.h" +#include <stddef.h> +#include <stdint.h> + +namespace OHOS { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; + +static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format, + OH_NativeColorSpaceManager *colorSpaceNative) +{ + OH_Pixelmap_InitializationOptions *options = nullptr; + (void)OH_PixelmapInitializationOptions_Create(&options); + (void)OH_PixelmapInitializationOptions_SetWidth(options, width); + (void)OH_PixelmapInitializationOptions_SetHeight(options, height); + (void)OH_PixelmapInitializationOptions_SetPixelFormat(options, format); + (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap); + (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative); +} + +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + OH_ImageProcessing* imageProcessor = nullptr; + OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(BT2020_PQ); + OH_NativeColorSpaceManager *dstColorSpace = OH_NativeColorSpaceManager_CreateForName(SRGB); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dstPic = nullptr; + OH_PixelmapNative *dstGainmap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102, srcColorSpace); + CreatePixelmap(&dstPic, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace); + CreatePixelmap(&dstGainmap, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace); + OH_PixelmapNative_WritePixels(src, const_cast<uint8_t *>(data), size); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Decompose(imageProcessor, src, dstPic, dstGainmap); + OH_PixelmapNative_Release(src); + OH_PixelmapNative_Release(dstPic); + OH_PixelmapNative_Release(dstGainmap); + OH_NativeColorSpaceManager_Destroy(srcColorSpace); + OH_NativeColorSpaceManager_Destroy(dstColorSpace); + OH_ImageProcessing_Destroy(imageProcessor); + return ret == IMAGE_PROCESSING_SUCCESS; +} +} // namespace OHOS + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.h b/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..182273d5b11254cb04ce2f1c4ee362e9542fa270 --- /dev/null +++ b/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "imagedecompose_fuzzer" \ No newline at end of file diff --git a/test/fuzztest/imagedecompose_fuzzer/project.xml b/test/fuzztest/imagedecompose_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/imagedecompose_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/imagehdr2sdr_fuzzer/BUILD.gn b/test/fuzztest/imagehdr2sdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..678ef55cab2884a9457caf9efb6500f14e18e87c --- /dev/null +++ b/test/fuzztest/imagehdr2sdr_fuzzer/BUILD.gn @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("Imagehdr2sdrFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagehdr2sdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagehdr2sdr_fuzzer.cpp" + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.cpp b/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4e3c5a87a86d82ef1615b75ee4f9ba3df4a38939 --- /dev/null +++ b/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.cpp @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "imagehdr2sdr_fuzzer.h" +#include "image_processing.h" +#include "image/pixelmap_native.h" +#include "native_color_space_manager.h" +#include <stddef.h> +#include <stdint.h> + +namespace OHOS { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +OH_ImageProcessing* imageProcessor = nullptr; + +static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format, + OH_NativeColorSpaceManager *colorSpaceNative) +{ + OH_Pixelmap_InitializationOptions *options = nullptr; + (void)OH_PixelmapInitializationOptions_Create(&options); + (void)OH_PixelmapInitializationOptions_SetWidth(options, width); + (void)OH_PixelmapInitializationOptions_SetHeight(options, height); + (void)OH_PixelmapInitializationOptions_SetPixelFormat(options, format); + (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap); + (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative); +} + +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (imageProcessor == nullptr) { + OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + } + OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(BT2020_PQ); + OH_NativeColorSpaceManager *dstColorSpace = OH_NativeColorSpaceManager_CreateForName(SRGB); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102, srcColorSpace); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace); + OH_PixelmapNative_WritePixels(src, const_cast<uint8_t *>(data), size); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + OH_PixelmapNative_Release(src); + OH_PixelmapNative_Release(dst); + OH_NativeColorSpaceManager_Destroy(srcColorSpace); + OH_NativeColorSpaceManager_Destroy(dstColorSpace); + return ret == IMAGE_PROCESSING_SUCCESS; +} +} // namespace OHOS + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.h b/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..e8a56f9f3fdd0d012fdbe0c96254b9a5a551d69c --- /dev/null +++ b/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "imagehdr2sdr_fuzzer" \ No newline at end of file diff --git a/test/fuzztest/imagehdr2sdr_fuzzer/project.xml b/test/fuzztest/imagehdr2sdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/imagehdr2sdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/imagemetadatagen_fuzzer/BUILD.gn b/test/fuzztest/imagemetadatagen_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..f4f3191dd4718ea77ceab3e42f626345f6ea802d --- /dev/null +++ b/test/fuzztest/imagemetadatagen_fuzzer/BUILD.gn @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("ImagemetadatagenFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagemetadatagen_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagemetadatagen_fuzzer.cpp" + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.cpp b/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e37ceac3c225b1ba51bd20d0ee6dd81316fff3c0 --- /dev/null +++ b/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.cpp @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "imagemetadatagen_fuzzer.h" +#include "image_processing.h" +#include "image/pixelmap_native.h" +#include "native_color_space_manager.h" +#include <stddef.h> +#include <stdint.h> + +namespace OHOS { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +OH_ImageProcessing* imageProcessor = nullptr; + +static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format, + OH_NativeColorSpaceManager *colorSpaceNative) +{ + OH_Pixelmap_InitializationOptions *options = nullptr; + (void)OH_PixelmapInitializationOptions_Create(&options); + (void)OH_PixelmapInitializationOptions_SetWidth(options, width); + (void)OH_PixelmapInitializationOptions_SetHeight(options, height); + (void)OH_PixelmapInitializationOptions_SetPixelFormat(options, format); + (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap); + (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative); +} + +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (imageProcessor == nullptr) { + OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_METADATA_GENERATION); + } + OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(BT2020_PQ); + OH_PixelmapNative *src = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102, srcColorSpace); + OH_PixelmapNative_WritePixels(src, const_cast<uint8_t *>(data), size); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_GenerateMetadata(imageProcessor, src); + OH_PixelmapNative_Release(src); + OH_NativeColorSpaceManager_Destroy(srcColorSpace); + return ret == IMAGE_PROCESSING_SUCCESS; +} +} // namespace OHOS + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.h b/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..2ee61274b59d8557fe7772d335956334bc1a867b --- /dev/null +++ b/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "imagemetadatagen_fuzzer" \ No newline at end of file diff --git a/test/fuzztest/imagemetadatagen_fuzzer/project.xml b/test/fuzztest/imagemetadatagen_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/imagemetadatagen_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/imagesdr2sdr_fuzzer/BUILD.gn b/test/fuzztest/imagesdr2sdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..79590b56803e4a489b73a301f658cce89a1bb2a1 --- /dev/null +++ b/test/fuzztest/imagesdr2sdr_fuzzer/BUILD.gn @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("Imagesdr2sdrFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagesdr2sdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagesdr2sdr_fuzzer.cpp" + ] + external_deps = [ + "c_utils:utils", + "graphic_2d:libnative_color_space_manager", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "image_framework:pixelmap", + "image_framework:pixelmap_ndk", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.cpp b/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..58e745995b50581adf4cafeb450cdaf5cb4c8f31 --- /dev/null +++ b/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.cpp @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "imagesdr2sdr_fuzzer.h" +#include "image_processing.h" +#include "image/pixelmap_native.h" +#include "native_color_space_manager.h" +#include <stddef.h> +#include <stdint.h> + +namespace OHOS { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +OH_ImageProcessing* imageProcessor = nullptr; + +static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format, + OH_NativeColorSpaceManager *colorSpaceNative) +{ + OH_Pixelmap_InitializationOptions *options = nullptr; + (void)OH_PixelmapInitializationOptions_Create(&options); + (void)OH_PixelmapInitializationOptions_SetWidth(options, width); + (void)OH_PixelmapInitializationOptions_SetHeight(options, height); + (void)OH_PixelmapInitializationOptions_SetPixelFormat(options, format); + (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap); + (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative); +} + +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (imageProcessor == nullptr) { + OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + } + OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(SRGB); + OH_NativeColorSpaceManager *dstColorSpace = OH_NativeColorSpaceManager_CreateForName(DISPLAY_P3); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, srcColorSpace); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace); + OH_PixelmapNative_WritePixels(src, const_cast<uint8_t *>(data), size); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + OH_PixelmapNative_Release(src); + OH_PixelmapNative_Release(dst); + OH_NativeColorSpaceManager_Destroy(srcColorSpace); + OH_NativeColorSpaceManager_Destroy(dstColorSpace); + return ret == IMAGE_PROCESSING_SUCCESS; +} +} // namespace OHOS + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.h b/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..e8a56f9f3fdd0d012fdbe0c96254b9a5a551d69c --- /dev/null +++ b/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "imagehdr2sdr_fuzzer" \ No newline at end of file diff --git a/test/fuzztest/imagesdr2sdr_fuzzer/project.xml b/test/fuzztest/imagesdr2sdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/imagesdr2sdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/resources/ohos_test.xml b/test/fuzztest/resources/ohos_test.xml new file mode 100644 index 0000000000000000000000000000000000000000..68cdede6b585f311c03c0a5040c4261c54a2b310 --- /dev/null +++ b/test/fuzztest/resources/ohos_test.xml @@ -0,0 +1,25 @@ + + + + + + + + + diff --git a/test/fuzztest/videohdrtohdr_fuzzer/BUILD.gn b/test/fuzztest/videohdrtohdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..85c92d6b64afecb2a6267da570c43bc24991afd5 --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/BUILD.gn @@ -0,0 +1,52 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#####################hydra-fuzz################### +import("//build/config/features.gni") +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") +module_output_path = "video_processing_engine/fuzztest" +##############################fuzztest########################################## +ohos_fuzztest("VideohdrtohdrFuzzTest") { + module_out_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videohdrtohdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/interfaces/kits" + ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "videohdrtohdr_fuzzer.cpp", + "videodec_sample.cpp", + ] + + external_deps = [ + "c_utils:utils", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "graphic_surface:sync_fence", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:media_foundation", + "media_foundation:native_media_core", + "window_manager:libwm", + ] + deps = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework:video_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/videohdrtohdr_fuzzer/project.xml b/test/fuzztest/videohdrtohdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/videohdrtohdr_fuzzer/video_sample.cpp b/test/fuzztest/videohdrtohdr_fuzzer/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4e66a8a148101c27bb20fc31840572a11d86b585 --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/video_sample.cpp @@ -0,0 +1,177 @@ +/* + * Copyright 
(C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "video_sample.h" +#include "securec.h" +#include "sync_fence.h" + +using namespace OHOS; +using namespace std; + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs) : cs(cs) {}; + ~VPEConsumerListener() {} + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + int32_t fence = -1; + sptr cs {nullptr}; +}; + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == 
VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrtohdr_fuzzer/video_sample.h b/test/fuzztest/videohdrtohdr_fuzzer/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..52b3fa7246a501385ab5072b1a2c7e1206d0dec6 --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/video_sample.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener{ +public: + explicit VPEConsumerListener(sptr cs) : cs(cs){}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + 
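    // Bind the OnError, OnState, and OnNewOutputBuffer handlers, then register the callback object with the video processor so errors, state changes, and output buffers are reported back to this sample. +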
OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.cpp b/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4051d88d7681399780b0a86faed382c7a0c2d88b --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.cpp @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include +#include +#include +#include + +#include "video_sample.h" + +using namespace std; +using namespace OHOS; +using namespace OHOS::Media; + +#define FUZZ_PROJECT_NAME "videohdrtohdr_fuzzer" +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; + +namespace OHOS { + VideoSample *sample = nullptr; + bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) + { + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); + } +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.h b/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..a2330e961641a6cb21f3ca20b4925235a5c3dcda --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "videohdrtohdr_fuzzer" \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/BUILD.gn b/test/fuzztest/videohdrvividtohdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..37a74722da34443ebecd88d2952a0f3a72c3d1a3 --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/BUILD.gn @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("VideohdrvividtohdrFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videohdrvividtohdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "videohdrvividtohdr_fuzzer.cpp", + "video_sample.cpp" + ] + external_deps = [ + "c_utils:utils", + "hilog:libhilog", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/resources/ohos_test.xml" +} \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/project.xml b/test/fuzztest/videohdrvividtohdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.cpp b/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c6a1e7b2425d720443327cff171487e3dd28e1a5 --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.cpp @@ -0,0 +1,192 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include "video_sample.h" +#include "securec.h" +#include "sync_fence.h" + +using namespace OHOS; +using namespace std; + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs) : cs(cs) {}; + ~VPEConsumerListener() {} + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + int32_t fence = -1; + sptr cs {nullptr}; +}; + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, 
ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + int32_t err = 0; + metaDataFile = std::make_unique(inputMetaPath); + metaDataFile->seekg(0, ios::end); + metadataSize = metaDataFile->tellg(); + metaDataFile->seekg(0, ios::beg); + metaData = new uint8_t[metadataSize]; + metaDataFile->read(reinterpret_cast(metaData), metadataSize); + uint8_t val = OH_VIDEO_HDR_VIVID; + err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + if (err != 0) { + cout << "set metadata type failed" << endl; + } + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t ret = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_DYNAMIC_METADATA, metadataSize, metaData); + CHECK_AND_RETURN_RET(ret == 0, ret, "set metadata value failed"); + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.h b/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..52b3fa7246a501385ab5072b1a2c7e1206d0dec6 --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener{ +public: + explicit VPEConsumerListener(sptr cs) : cs(cs){}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + 
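+ // Same setup as the conversion sample: the callbacks declared above are hooked up so every new output buffer is immediately rendered back toward the consumer surface.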
CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.cpp b/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..23bb20a0f1786428293b08fbbce9ba26a677ab02 --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.cpp @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "videohdrvividtohdr_fuzzer.h" +#include "video_sample.h" +#include +#include + +namespace OHOS { +VideoSample *sample = nullptr; +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); +} +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.h b/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..a2330e961641a6cb21f3ca20b4925235a5c3dcda --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "videohdrtohdr_fuzzer" \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/BUILD.gn b/test/fuzztest/videohdrvividtosdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..554fc0ee58f7bb74c5933b70e5b932b7bf312e03 --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/BUILD.gn @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("VideohdrvividtosdrFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videohdrvividtosdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "videohdrvividtosdr_fuzzer.cpp", + "video_sample.cpp" + ] + external_deps = [ + "c_utils:utils", + "hilog:libhilog", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/resources/ohos_test.xml" +} \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/project.xml b/test/fuzztest/videohdrvividtosdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.cpp b/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4e29e1ebdf5b029c30b3e9f4c4f84b18feae4a66 --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.cpp @@ -0,0 +1,191 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include "video_sample.h" +#include "securec.h" +#include "sync_fence.h" + +using namespace OHOS; +using namespace std; + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs) : cs(cs) {}; + ~VPEConsumerListener() {} + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + int32_t fence = -1; + sptr cs {nullptr}; +}; + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, 
ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + int32_t err = 0; + metaDataFile = std::make_unique(inputMetaPath); + metaDataFile->seekg(0, ios::end); + metadataSize = metaDataFile->tellg(); + metaDataFile->seekg(0, ios::beg); + metaData = new uint8_t[metadataSize]; + metaDataFile->read(reinterpret_cast(metaData), metadataSize); + uint8_t val = OH_VIDEO_HDR_VIVID; + err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + if (err != 0) { + cout << "set metadata type failed" << endl; + } + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t ret = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_DYNAMIC_METADATA, metadataSize, metaData); + CHECK_AND_RETURN_RET(ret == 0, ret, "set metadata value failed"); + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.h b/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..52b3fa7246a501385ab5072b1a2c7e1206d0dec6 --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener{ +public: + explicit VPEConsumerListener(sptr cs) : cs(cs){}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + 
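+ // Callback registration below passes `this` as user data, although the no-op handlers in this header never read it.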
CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.cpp b/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..36dd42655967bdbdce6ba38bc29dd7ab070f5c5a --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.cpp @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "videohdrvividtosdr_fuzzer.h" +#include "video_sample.h" +#include +#include + +namespace OHOS { +VideoSample *sample = nullptr; +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); +} +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.h b/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..4baffe285771dec6b161d55ee1595a84e4366066 --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "videohdrvividtosdr_fuzzer" \ No newline at end of file diff --git a/test/fuzztest/videometadatagen_fuzzer/BUILD.gn b/test/fuzztest/videometadatagen_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..e50c615f92a771b94025467e7978351d71158d21 --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/BUILD.gn @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("VideometadatagenFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videometadatagen_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "videometadatagen_fuzzer.cpp", + "video_sample.cpp" + ] + external_deps = [ + "c_utils:utils", + "hilog:libhilog", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/resources/ohos_test.xml" +} \ No newline at end of file diff --git a/test/fuzztest/videometadatagen_fuzzer/project.xml b/test/fuzztest/videometadatagen_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/videometadatagen_fuzzer/video_sample.cpp b/test/fuzztest/videometadatagen_fuzzer/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ae37b1fe1b38bf97c4e9d2386f48354288e6885e --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/video_sample.cpp @@ -0,0 +1,187 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include "video_sample.h" +#include "video_processing_types.h" +#include "securec.h" +#include "sync_fence.h" + +using namespace OHOS; +using namespace std; + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs) : cs(cs) {}; + ~VPEConsumerListener() {} + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + int32_t fence = -1; + sptr cs {nullptr}; +}; + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + uint8_t val = OH_VIDEO_HDR_VIVID; + ret = OH_NativeWindow_SetMetadataValue(outWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + if (ret != 0) { + cout << "set metadata failed" << endl; + } + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + 
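+ // Note: OnState above updates g_state and notifies g_Cond, which are assumed to be declared in video_sample.h; nothing in this sample waits on them.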
OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + int32_t err = 0; + uint8_t val = OH_VIDEO_HDR_VIVID; + err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + if (err != 0) { + cout << "set metadata type failed" << endl; + } + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videometadatagen_fuzzer/video_sample.h b/test/fuzztest/videometadatagen_fuzzer/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..52b3fa7246a501385ab5072b1a2c7e1206d0dec6 --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/video_sample.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener{ +public: + explicit VPEConsumerListener(sptr cs) : cs(cs){}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + 
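+ // With both surfaces wired up, the callback object created above is populated and registered so state changes and new output buffers are handled while fuzz input is fed in.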
OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videometadatagen_fuzzer/videometadatagen_fuzzer.cpp b/test/fuzztest/videometadatagen_fuzzer/videometadatagen_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b493ef47943b5fa6682a4caba6f5ed7875c8ab9f --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/videometadatagen_fuzzer.cpp @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include +#include "video_sample.h" + +namespace OHOS { +VideoSample *sample = nullptr; +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); +} +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videosdrtosdr_fuzzer/BUILD.gn b/test/fuzztest/videosdrtosdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..3b444c00bd2b750601442b7f220ea6249749a3ff --- /dev/null +++ b/test/fuzztest/videosdrtosdr_fuzzer/BUILD.gn @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("VideometadatagenFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videosdrtosdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "videosdrtosdr_fuzzer.cpp", + "video_sample.cpp" + ] + external_deps = [ + "c_utils:utils", + "hilog:libhilog", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/resources/ohos_test.xml" +} \ No newline at end of file diff --git a/test/fuzztest/videosdrtosdr_fuzzer/project.xml b/test/fuzztest/videosdrtosdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e --- /dev/null +++ b/test/fuzztest/videosdrtosdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/videosdrtosdr_fuzzer/video_sample.cpp b/test/fuzztest/videosdrtosdr_fuzzer/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0e5d02bf96379c246a1ef68dc55b4af7c1c40b24 --- /dev/null +++ b/test/fuzztest/videosdrtosdr_fuzzer/video_sample.cpp @@ -0,0 +1,177 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include "video_sample.h" +#include "securec.h" +#include "sync_fence.h" + +using namespace OHOS; +using namespace std; + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs) : cs(cs) {}; + ~VPEConsumerListener() {} + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + int32_t fence = -1; + sptr cs {nullptr}; +}; + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, 
ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3 / 2, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videosdrtosdr_fuzzer/video_sample.h b/test/fuzztest/videosdrtosdr_fuzzer/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..52b3fa7246a501385ab5072b1a2c7e1206d0dec6 --- /dev/null +++ b/test/fuzztest/videosdrtosdr_fuzzer/video_sample.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener{ +public: + explicit VPEConsumerListener(sptr cs) : cs(cs){}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void 
VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.cpp b/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f08807ba24309fd732ab03fc683d018a528aecf7 --- /dev/null +++ b/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.cpp @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include "video_sample.h" + +namespace OHOS { +VideoSample *sample = nullptr; +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); +} +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.h b/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..4baffe285771dec6b161d55ee1595a84e4366066 --- /dev/null +++ b/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.h @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include + +#define FUZZ_PROJECT_NAME "videohdrvividtosdr_fuzzer" \ No newline at end of file diff --git a/test/nativedemo/vpe_demo2/BUILD.gn b/test/nativedemo/vpe_demo2/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..255c59b0861ffdf45a78a31ff30e71319fcb6f66 --- /dev/null +++ b/test/nativedemo/vpe_demo2/BUILD.gn @@ -0,0 +1,152 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/ohos.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_executable("vpe_video_nativedemo") { + include_dirs = [ + "$CAPI_DIR", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/moduletest/common", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../../graphic/graphic_2d/interfaces/inner_api", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../../window/window_manager/interfaces/innerkits" + ] + + cflags = [ + "-Wall", + "-fno-rtti", + "-fno-exceptions", + "-fno-common", + "-fstack-protector-strong", + "-Wshadow", + "-FPIC", + "-FS", + "-O2", + "-D_FORTIFY_SOURCE=2", + "-fvisibility=hidden", + "-Wformat=2", + "-Wdate-time", + "-Werror", + "-Wextra", + "-Wimplicit-fallthrough", + "-Wsign-compare", + "-Wno-unused-parameter", + "-Wno-deprecated-declarations", + ] + + cflags_cc = cflags + cflags_cc += [ "-std=c++17" ] + + sources = [ + "video_processing_demo.cpp", + "../common/yuv_viewer.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:video_processing" + ] + + external_deps = [ + "c_utils:utils", + "ffmpeg:libohosffmpeg", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "graphic_surface:sync_fence", + "hilog:libhilog", + "ipc:ipc_core", + "media_foundation:media_foundation", + "media_foundation:native_media_core", + "window_manager:libwm", + ] + + install_enable = false + + part_name = "video_processing_engine" + subsystem_name = "multimedia" +} + + + + + + + + + + + + + + +video_demo_native_include_dirs = [ + "$vpe_interface_capi_dir", + "$vpe_capi_root_dir/test/moduletest/common", + "$vpe_capi_root_dir/../../../graphic/graphic_2d/interfaces/inner_api", + "$vpe_capi_root_dir/../../../window/window_manager/interfaces/innerkits" +] + +video_nativedemo_cflags = [ + "-std=c++17", + "-fno-rtti", + "-fno-exceptions", + "-Wall", + "-fno-common", + "-fstack-protector-strong", + "-Wshadow", + "-FPIC", + "-FS", + "-O2", + "-D_FORTIFY_SOURCE=2", + "-fvisibility=hidden", + "-Wformat=2", + "-Wdate-time", + "-Werror", + "-Wextra", + "-Wimplicit-fallthrough", + "-Wsign-compare", + "-Wunused-parameter", +] + +################################################################################################################## +ohos_executable("vpe_video_native_demo") { + include_dirs = video_demo_native_include_dirs + include_dirs += [ "./" ] + cflags = video_nativedemo_cflags + + sources = [ + "video_processing_demo.cpp", + "../common/yuv_viewer.cpp", + ] + + deps = [ + "$vpe_capi_root_dir/framework:video_processing" + ] + + external_deps = [ + "c_utils:utils", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "hilog:libhilog", + "ipc:ipc_core", + "media_foundation:media_foundation", + "media_foundation:native_media_core", + "window_manager:libwm", + ] + + install_enable = false + + part_name = "video_processing_engine" + subsystem_name = "multimedia" +} diff --git a/test/nativedemo/vpe_demo2/video_processing_demo.cpp b/test/nativedemo/vpe_demo2/video_processing_demo.cpp new file mode 100644 index 0000000000000000000000000000000000000000..13a298863144b77c9146b1f0b439fe03be3b1b4c --- /dev/null +++ b/test/nativedemo/vpe_demo2/video_processing_demo.cpp @@ -0,0 +1,376 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "avcodec_e2e_demo.h" + +#include +#include +#include +#include +#include +#include +#include "av_common.h" +#include "avcodec_common.h" +#include "avcodec_errors.h" +#include "native_avcodec_videodecoder.h" +#include "native_avcodec_videoencoder.h" +#include "media_description.h" +#include "native_avformat.h" +#include "native_avcodec_base.h" + +using namespace OHOS; +using namespace std; +constexpr int64_t MICRO_IN_SECOND = 1000000L; +constexpr float FRAME_INTERVAL_TIMES = 1.5; +constexpr int32_t AUDIO_BUFFER_SIZE = 1024 * 1024; +constexpr double DEFAULT_FRAME_RATE = 25.0; +constexpr std::chrono::seconds STOP_TIMEOUT(10); + +static uint32_t g_onErrorCount = 0; +static VideoProcessing_State g_state = VIDEO_PROCESSING_STATE_STOPPED; +static std::mutex g_Mutex; +static std::condition_variable g_Cond; + +static int64_t GetFileSize(const char *fileName) +{ + int64_t fileSize = 0; + if (fileName != nullptr) { + struct stat fileStatus {}; + if (stat(fileName, &fileStatus) == 0) { + fileSize = static_cast(fileStatus.st_size); + } + } + return fileSize; +} + +static void OnError(OH_AVCodec *codec, int32_t errorCode, void *userData) +{ + (void)codec; + (void)userData; + cout<<"error :"<(userData); + OH_AVDemuxer_ReadSampleBuffer(demo->demuxer, demo->videoTrackID, buffer); + OH_VideoDecoder_PushInputBuffer(codec, index); +} + +static void OnDecOutputBufferAvailable(OH_AVCodec *codec, uint32_t index, OH_AVBuffer *buffer, void *userData) +{ + VideoProcessingDemo *demo = static_cast(userData); + OH_AVCodecBufferAttr attr; + OH_AVBuffer_GetBufferAttr(buffer, &attr); + if (attr.flags & AVCODEC_BUFFER_FLAGS_EOS) { + OH_VideoEncoder_NotifyEndOfStream(demo->enc); + } + OH_VideoDecoder_RenderOutputBuffer(codec, index); +} + +static void OnEncStreamChanged(OH_AVCodec *codec, OH_AVFormat *format, void *userData) +{ + cout<<"format changed"<(userData); + OH_AVCodecBufferAttr attr; + OH_AVBuffer_GetBufferAttr(buffer, &attr); + if (attr.flags & AVCODEC_BUFFER_FLAGS_EOS) { + demo->isFinish.store(true); + demo->waitCond.notify_all(); + return; + } + OH_AVMuxer_WriteSampleBuffer(demo->muxer, 0, buffer); + OH_VideoEncoder_FreeOutputBuffer(codec, index); +} + + +static void OnVPEError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + g_onErrorCount++; + std::cout << "OnError callback recv errorcode:" << error << std::endl; +} + +static void OnVPEState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); + if (ret != VIDEO_PROCESSING_SUCCESS) { + g_onErrorCount++; + std::cout << "Render output buffer failed,errcode: "<< ret << std::endl; + } +} + +VideoProcessingDemo::VideoProcessingDemo(const char *file, VideoProcessing_ColorSpaceInfo inInfo, + 
VideoProcessing_ColorSpaceInfo outInfo) +{ + inInfo_ = inInfo; + outInfo_ = outInfo; + fd = open(file, O_RDONLY); + outFd = open("./output.mp4", O_CREAT | O_RDWR | O_TRUNC, S_IRUSR | S_IWUSR); + int64_t size = GetFileSize(file); + inSource = OH_AVSource_CreateWithFD(fd, 0, size); + if (!inSource) { + cout << "create source failed" << endl; + } + OH_VideoProcessing_InitializeEnvironment(); + demuxer = OH_AVDemuxer_CreateWithSource(inSource); + muxer = OH_AVMuxer_Create(outFd, AV_OUTPUT_FORMAT_MPEG_4); + if (!muxer || !demuxer) { + cout << "create muxer demuxer failed" << endl; + } + OH_AVFormat *sourceFormat = OH_AVSource_GetSourceFormat(inSource); + OH_AVFormat_GetIntValue(sourceFormat, OH_MD_KEY_TRACK_COUNT, &trackCount); + for (int32_t index = 0; index < trackCount; index++) { + SetTrackFormat(index); + } + OH_AVFormat_Destroy(sourceFormat); +} + +void VideoProcessingDemo::SetTrackFormat(int32_t index) +{ + OH_AVFormat *trackFormat = OH_AVSource_GetTrackFormat(inSource, index); + OH_AVDemuxer_SelectTrackByID(demuxer, index); + int32_t trackType = -1; + OH_AVFormat_GetIntValue(trackFormat, OH_MD_KEY_TRACK_TYPE, &trackType); + if (trackType == MEDIA_TYPE_VID) { + videoTrackID = index; + OH_AVMuxer_AddTrack(muxer, &muxVideoTrackID, trackFormat); + OH_AVFormat_GetIntValue(trackFormat, OH_MD_KEY_TRACK_TYPE, &trackType); + char *mime = nullptr; + OH_AVFormat_GetStringValue(trackFormat, OH_MD_KEY_CODEC_MIME, &mime); + dec = OH_VideoDecoder_CreateByMime(mime); + if (!needRender) { + enc = OH_VideoEncoder_CreateByMime(OH_AVCODEC_MIMETYPE_VIDEO_HEVC); + } + if (isMetadataGen) { + OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + } else { + OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + } + if (!enc || !dec || !videoProcessor) { + cout << "create codec or processor failed" << endl; + return; + } + int32_t isVividIn = false; + OH_AVFormat_GetIntValue(trackFormat, OH_MD_KEY_VIDEO_IS_HDR_VIVID, &isVividIn); + viewer = std::make_unique(); + OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_PIXEL_FORMAT, inInfo_.pixelFormat); + OH_VideoDecoder_Configure(dec, trackFormat); + OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_PIXEL_FORMAT, outInfo_.pixelFormat); + if (outInfo_.colorSpace == OH_COLORSPACE_BT2020_HLG_LIMIT || + outInfo_.colorSpace == OH_COLORSPACE_BT2020_PQ_LIMIT) { + OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_PROFILE, HEVC_PROFILE_MAIN_10); + isHDRVividOut = isVividIn; + } else { + OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_PROFILE, HEVC_PROFILE_MAIN); + } + if (!needRender) { + OH_VideoEncoder_Configure(enc, trackFormat); + } + } else if (trackType == MEDIA_TYPE_AUD) { + audioTrackID = index; + OH_AVMuxer_AddTrack(muxer, &muxAudioTrackID, trackFormat); + } + OH_AVFormat_Destroy(trackFormat); +} + + +VideoProcessingDemo::~VideoProcessingDemo() +{ + OH_VideoProcessing_Destroy(videoProcessor); + OH_VideoProcessing_DeinitializeEnvironment(); + if (dec) { + OH_VideoDecoder_Destroy(dec); + } + if (enc) { + OH_VideoEncoder_Destroy(enc); + } + if (muxer) { + OH_AVMuxer_Destroy(muxer); + } + if (demuxer) { + OH_AVDemuxer_Destroy(demuxer); + } + if (inSource) { + OH_AVSource_Destroy(inSource); + } + close(fd); + close(outFd); +} + +void VideoProcessingDemo::ConfigureCodec() +{ + if (!needRender) { + OH_AVCodecCallback encCallback; + encCallback.onError = OnError; + encCallback.onStreamChanged = OnEncStreamChanged; + encCallback.onNeedInputBuffer = OnEncInputBufferAvailable; + encCallback.onNewOutputBuffer = 
OnEncOutputBufferAvailable; + OH_VideoEncoder_RegisterCallback(enc, encCallback, this); + } + OH_AVCodecCallback decCallback; + decCallback.onError = OnError; + decCallback.onStreamChanged = OnDecStreamChanged; + decCallback.onNeedInputBuffer = OnDecInputBufferAvailable; + decCallback.onNewOutputBuffer = OnDecOutputBufferAvailable; + OH_VideoDecoder_RegisterCallback(dec, decCallback, this); + if (needRender) { + outWindow = viewer->CreateWindow(width_, height_, param.outFmt, param.outColorSpace, isHDRVividOut); + } else { + OH_VideoEncoder_GetSurface(enc, &outWindow); + } + int32_t ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + if (ret != VIDEO_PROCESSING_SUCCESS) { + std::cout<<" get vpe input surface failed"<(&VideoProcessingDemo::WriteAudioTrack, this); + } +} + +void VideoProcessingDemo::WaitForEOS() +{ + std::mutex waitMtx; + unique_lock lock(waitMtx); + waitCond.wait(lock, [this]() { + return isFinish.load(); + }); + if (audioThread) { + audioThread->join(); + } + cout << "task finish" << endl; +} + +void VideoProcessingDemo::Stop() +{ + OH_VideoDecoder_Stop(dec); + if (!needRender) { + OH_VideoEncoder_Stop(enc); + } + OH_VideoProcessing_Stop(videoProcessor); + unique_lock lock(g_Mutex); + if (g_Cond.wait_for(lock, STOP_TIMEOUT) == std::cv_status::timeout) { + std::cout << "waiting stop state timeout" << std::endl; + } + OH_AVMuxer_Stop(muxer); +} + + + +int main(int32_t argc ,char *argv[]) +{ + if (argc < 7) { + std::cout<< "parameter not enough."<(path, isMetadataGen, inInfo, outInfo); + vpeDemo->needRender = atoi(argv[6]); + vpeDemo->Configure(); + vpeDemo->Start(); + vpeDemo->WaitForEOS(); + vpeDemo->Stop(); + return 0; +} \ No newline at end of file diff --git a/test/nativedemo/vpe_demo2/video_processing_demo.h b/test/nativedemo/vpe_demo2/video_processing_demo.h new file mode 100644 index 0000000000000000000000000000000000000000..48f953bd0453117587641972619fc011145e1ab6 --- /dev/null +++ b/test/nativedemo/vpe_demo2/video_processing_demo.h @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef VIDEO_PROCESSING_DEMO_H +#define VIDEO_PROCESSING_DEMO_H + +#include +#include +#include + +#include "native_avcodec_base.h" +#include "native_avdemuxer.h" +#include "native_avmuxer.h" +#include "native_avsource.h" +#include "native_avformat.h" +#include "video_processing.h" +namespace OHOS { +class VideoProcessingDemo { +public: + VideoProcessingDemo(const char *file, bool isMetaDataGen, + VideoProcessing_ColorSpaceInfo inInfo, + VideoProcessing_ColorSpaceInfo outInfo); + ~VideoProcessingDemo(); + void SetTrackFormat(int32_t index); + void ConfigureCodec(); + void ConfigureProcessor(); + void Start(); + void WaitForEOS(); + void Stop(); + void WriteAudioTrack(); + OH_AVDemuxer *demuxer = nullptr; + OH_AVMuxer *muxer = nullptr; + uint32_t videoTrackID = -1; + uint32_t audioTrackID = -1; + uint32_t muxVideoTrackID = -1; + uint32_t muxAudioTrackID = -1; + OH_AVCodec *dec = nullptr; + OH_AVCodec *enc = nullptr; + std::condition_variable waitCond; + std::atomic<bool> isFinish; + uint32_t frameDuration = 0; + std::unique_ptr<std::thread> audioThread; + bool needRender = false; +private: + int32_t width_ = 0; + int32_t height_ = 0; + int32_t isHDRVividOut = 0; + VideoProcessing_ColorSpaceInfo inInfo_; + VideoProcessing_ColorSpaceInfo outInfo_; + OH_AVSource *inSource = nullptr; + int32_t trackCount = 0; + int32_t fd; + int32_t outFd; + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_Callback* callback = nullptr; + OHNativeWindow *inWindow = nullptr; + const OHNativeWindow *outWindow = nullptr; + std::unique_ptr<YuvViewer> viewer; +}; +} + +#endif diff --git a/test/ndk/moduletest/common/yuv_viewer.cpp b/test/ndk/moduletest/common/yuv_viewer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..cb377cb393e34b67a2e269ed140b6a652f3f653d --- /dev/null +++ b/test/ndk/moduletest/common/yuv_viewer.cpp @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "yuv_viewer.h" +#include +#include +#include "refbase.h" +#include "surface/window.h" +#include "surface.h" +#include "ui/rs_surface_node.h" +#include "window_option.h" +using namespace OHOS; + +YuvViewer::~YuvViewer() +{ + if (window_) { + OH_NativeWindow_DestroyNativeWindow(window_); + window_ = nullptr; + } + if (rosenWindow_) { + rosenWindow_->Destroy(); + rosenWindow_ = nullptr; + } +} + +OHNativeWindow *YuvViewer::CreateWindow(uint32_t width, uint32_t height, + OH_NativeBuffer_Format pix_fmt, OH_NativeBuffer_ColorSpace color_space, bool isHDRVivid) +{ + sptr surfaceProducer; + sptr option = new Rosen::WindowOption(); + option->SetWindowType(Rosen::WindowType::WINDOW_TYPE_FLOAT); + option->SetWindowMode(Rosen::WindowMode::WINDOW_MODE_FULLSCREEN); + rosenWindow_ = Rosen::Window::Create("VpeDemo", option); + if (rosenWindow_ == nullptr) { + std::cout << "rosen window create failed" << std::endl; + return nullptr; + } + rosenWindow_->SetTurnScreenOn(!rosenWindow_->IsTurnScreenOn()); + rosenWindow_->SetKeepScreenOn(true); + rosenWindow_->Show(); + surfaceProducer = rosenWindow_->GetSurfaceNode()->GetSurface(); + + window_ = CreateNativeWindowFromSurface(&surfaceProducer); + + (void)OH_NativeWindow_NativeWindowHandleOpt(window_, SET_TRANSFORM, 1); // 1: rotation 90° + (void)OH_NativeWindow_NativeWindowHandleOpt(window_, SET_BUFFER_GEOMETRY, + width, height); + (void)OH_NativeWindow_NativeWindowHandleOpt(window_, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(window_, SET_FORMAT, pix_fmt); + return window_; +} \ No newline at end of file diff --git a/test/ndk/moduletest/common/yuv_viewer.h b/test/ndk/moduletest/common/yuv_viewer.h new file mode 100644 index 0000000000000000000000000000000000000000..a259283cdd8b918ae50d01bfbe5110003022acc1 --- /dev/null +++ b/test/ndk/moduletest/common/yuv_viewer.h @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VPE_TEST_YUV_VIEWER_H +#define VPE_TEST_YUV_VIEWER_H +#include "wm/window.h" +#include "surface/native_image.h" +#include "surface/native_buffer.h" +namespace OHOS { +class YuvViewer{ +public: + ~YuvViewer(); + OHNativeWindow *CreateWindow(uint32_t width, uint32_t height, + OH_NativeBuffer_Format pix_fmt, OH_NativeBuffer_ColorSpace color_space, bool isHDRVivid); + +private: + sptr rosenWindow_; + OHNativeWindow *window_ = nullptr; + +}; +} +#endif // VPE_TEST_YUV_VIEWER_H \ No newline at end of file diff --git a/test/ndk/moduletest/image/BUILD.gn b/test/ndk/moduletest/image/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..ed45974cc1ada8e6a003519133bc507237ef1c1f --- /dev/null +++ b/test/ndk/moduletest/image/BUILD.gn @@ -0,0 +1,86 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//build/ohos.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +module_output_path = MODULE_TEST_OUTPUT_PATH + +image_moduletest_native_include_dirs = [ + "$CAPI_DIR", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/moduletest/common", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../graphic/graphic_2d/interfaces/inner_api", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../window/window_manager/interfaces/innerkits", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../interface/sdk_c/multimedia/image_framework/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../image_framework/interfaces/kits/native/include", +] + +image_moduletest_cflags = [ + "-std=c++17", + "-fno-rtti", + "-fno-exceptions", + "-Wall", + "-fno-common", + "-fstack-protector-strong", + "-Wshadow", + "-FPIC", + "-FS", + "-O2", + "-D_FORTIFY_SOURCE=2", + "-fvisibility=hidden", + "-Wformat=2", + "-Wdate-time", + "-Werror", + "-Wextra", + "-Wimplicit-fallthrough", + "-Wsign-compare", + "-Wunused-parameter", +] + +################################################################################################################## +ohos_unittest("vpe_image_native_module_test") { + module_out_path = module_output_path + include_dirs = image_moduletest_native_include_dirs + include_dirs += [ "./" ] + cflags = image_moduletest_cflags + + sources = [ + "api_test.cpp", + #"capability_test.cpp", + "func_test.cpp", + "reli_test.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:image_processing", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../arkui/napi:ace_napi", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../image_framework/frameworks/kits/js/common/pixelmap_ndk:pixelmap_ndk", + ] + + external_deps = [ + "c_utils:utils", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "hilog:libhilog", + "ipc:ipc_core", + "media_foundation:media_foundation", + "media_foundation:native_media_core", + "window_manager:libwm", + "image_framework:pixelmap", + ] + + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/ndk/moduletest/resources/ohos_test.xml" +} diff --git a/test/ndk/moduletest/image/api_test.cpp b/test/ndk/moduletest/image/api_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..65c7a2e03a89ab7d0ecff8d81ccb30eabeeeb56c --- /dev/null +++ b/test/ndk/moduletest/image/api_test.cpp @@ -0,0 +1,1461 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include +#include +#include "gtest/gtest.h" +#include "image_processing.h" +#include "nocopyable.h" +#include "image/pixelmap_native.h" +#include "surface/native_buffer.h" +using namespace std; +using namespace OHOS; +using namespace testing::ext; +namespace { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +constexpr uint32_t PIX_SIZE = DEFAULT_WIDTH * DEFAULT_HEIGHT * 4; +uint8_t *g_pixData = nullptr; + +OH_Pixelmap_InitializationOptions *g_createOpts_RGBA = nullptr; +OH_Pixelmap_InitializationOptions *g_createOpts_BGRA = nullptr; +OH_PixelmapNative *pixelMap_RGBA = nullptr; +OH_PixelmapNative *pixelMap_BGRA = nullptr; +void InitCreatePixelmapParam_RGBA(){ + if (g_createOpts_RGBA == nullptr) + { + OH_PixelmapInitializationOptions_Create(&g_createOpts_RGBA); + OH_PixelmapInitializationOptions_SetWidth(g_createOpts_RGBA, DEFAULT_WIDTH); + OH_PixelmapInitializationOptions_SetHeight(g_createOpts_RGBA, DEFAULT_HEIGHT); + OH_PixelmapInitializationOptions_SetPixelFormat(g_createOpts_RGBA, NATIVEBUFFER_PIXEL_FMT_RGBA_8888); + } +} +void InitCreatePixelmapParam_BGRA(){ + if (g_createOpts_BGRA == nullptr) + { + OH_PixelmapInitializationOptions_Create(&g_createOpts_BGRA); + OH_PixelmapInitializationOptions_SetWidth(g_createOpts_BGRA, DEFAULT_WIDTH); + OH_PixelmapInitializationOptions_SetHeight(g_createOpts_BGRA, DEFAULT_HEIGHT); + OH_PixelmapInitializationOptions_SetPixelFormat(g_createOpts_BGRA, PIXEL_FORMAT_BGRA_8888); + } +} + + +class VpeImageApiTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; + + +void VpeImageApiTest::SetUpTestCase() { + g_pixData = new uint8_t[PIX_SIZE]; + InitCreatePixelmapParam_RGBA(); + InitCreatePixelmapParam_BGRA(); + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &pixelMap_RGBA); + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_BGRA, &pixelMap_BGRA); +} +void VpeImageApiTest::TearDownTestCase() { + if (g_pixData) { + delete[] g_pixData; + } + if (g_createOpts_RGBA) { + OH_PixelmapInitializationOptions_Release(g_createOpts_RGBA); + g_createOpts_RGBA = nullptr; + } + if (g_createOpts_BGRA) { + OH_PixelmapInitializationOptions_Release(g_createOpts_BGRA); + g_createOpts_BGRA = nullptr; + } +} +void VpeImageApiTest::SetUp() +{ + +} +void VpeImageApiTest::TearDown() +{ + OH_ImageProcessing_DeinitializeEnvironment(); +} + + +const ImageProcessing_ColorSpaceInfo SRC_INFO = {0, + OH_COLORSPACE_SRGB_FULL, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888}; +const ImageProcessing_ColorSpaceInfo DST_INFO = {0, + OH_COLORSPACE_P3_FULL, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888}; +const ImageProcessing_ColorSpaceInfo SRC_GAIN_INFO = {0, + OH_COLORSPACE_SRGB_FULL, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888}; +const ImageProcessing_ColorSpaceInfo DST_GAIN_INFO = {0, + OH_COLORSPACE_BT709_FULL, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888}; +const ImageProcessing_ColorSpaceInfo UNSUPPORTED_INFO = {0, + OH_COLORSPACE_BT601_EBU_FULL, + NATIVEBUFFER_PIXEL_FMT_BGRX_8888}; + +} + +namespace { +/** + * @tc.number : VPE_IMAGE_API_TEST_0010 + * @tc.name : first call OH_ImageProcessing_InitializeEnvironment + * 
@tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0010, TestSize.Level0) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); + } + OH_ImageProcessing_DeinitializeEnvironment(); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0020 + * @tc.name : first call OH_ImageProcessing_DeinitializeEnvironment + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0020, TestSize.Level0) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_DeinitializeEnvironment(); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0030 + * @tc.name : first call OH_ImageProcessing_DeinitializeEnvironment after initialize + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0030, TestSize.Level0) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_ImageProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0040 + * @tc.name : call OH_ImageProcessing_IsColorSpaceConversionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0040, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsColorSpaceConversionSupported(nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0050 + * @tc.name : call OH_ImageProcessing_IsColorSpaceConversionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0050, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsColorSpaceConversionSupported(&SRC_INFO, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0060 + * @tc.name : call OH_ImageProcessing_IsColorSpaceConversionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0060, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsColorSpaceConversionSupported(nullptr, &DST_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0070 + * @tc.name : call OH_ImageProcessing_IsColorSpaceConversionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0070, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsColorSpaceConversionSupported(&SRC_INFO, &DST_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_TRUE(ret); + } else { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0080 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * @tc.desc : function test + */ 
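The cases in this file probe each image-processing CAPI call in isolation, gating every expectation on whether the implementation library is present on the device. For orientation, a minimal happy-path sketch of the flow these tests cover piecewise (helper name is hypothetical; assumes the implementation library is available and error handling is elided):

#include "image_processing.h"
#include "image/pixelmap_native.h"

// Hypothetical sketch: initialize the environment, create a color-space
// converter, convert one pixelmap, then tear everything down again.
static ImageProcessing_ErrorCode ConvertOnce(OH_PixelmapNative *src, OH_PixelmapNative *dst)
{
    OH_ImageProcessing_InitializeEnvironment();
    OH_ImageProcessing *processor = nullptr;
    ImageProcessing_ErrorCode ret =
        OH_ImageProcessing_Create(&processor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION);
    if (ret == IMAGE_PROCESSING_SUCCESS) {
        ret = OH_ImageProcessing_ConvertColorSpace(processor, src, dst);
        OH_ImageProcessing_Destroy(processor);
    }
    OH_ImageProcessing_DeinitializeEnvironment();
    return ret;
}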
+HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0080, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsCompositionSupported(nullptr, nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0090 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0090, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsCompositionSupported(&SRC_INFO, nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0100 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0100, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsCompositionSupported(nullptr, nullptr, &DST_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0110 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0110, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsCompositionSupported(nullptr, &SRC_GAIN_INFO, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0120 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0120, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsCompositionSupported(&SRC_INFO, nullptr, &DST_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0130 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0130, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsCompositionSupported(nullptr, &SRC_GAIN_INFO, &DST_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0131 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0131, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsCompositionSupported(&SRC_INFO, &SRC_GAIN_INFO, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0140 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0140, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsCompositionSupported(&SRC_INFO, &SRC_GAIN_INFO, &DST_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_TRUE(ret); + } else { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0150 
+ * @tc.name : call OH_ImageProcessing_IsDecompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0150, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsDecompositionSupported(nullptr, nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0160 + * @tc.name : call OH_ImageProcessing_IsDecompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0160, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsDecompositionSupported(&SRC_INFO, nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0170 + * @tc.name : call OH_ImageProcessing_IsDecompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0170, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsDecompositionSupported(nullptr, &DST_INFO, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0180 + * @tc.name : call OH_ImageProcessing_IsDecompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0180, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsDecompositionSupported(nullptr, nullptr, &DST_GAIN_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0190 + * @tc.name : call OH_ImageProcessing_IsDecompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0190, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsDecompositionSupported(&SRC_INFO, nullptr, &DST_GAIN_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0200 + * @tc.name : call OH_ImageProcessing_IsDecompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0200, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsDecompositionSupported(nullptr, &DST_INFO, &DST_GAIN_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0201 + * @tc.name : call OH_ImageProcessing_IsDecompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0201, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsDecompositionSupported(&SRC_INFO, &DST_INFO, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0210 + * @tc.name : call OH_ImageProcessing_IsDecompositionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0210, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsDecompositionSupported(&SRC_INFO, &DST_INFO, &DST_GAIN_INFO); + if 
(!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_TRUE(ret); + } else { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0220 + * @tc.name : call OH_ImageProcessing_IsMetadataGenerationSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0220, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsMetadataGenerationSupported(nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0230 + * @tc.name : call OH_ImageProcessing_IsMetadataGenerationSupported + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0230, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + const ImageProcessing_ColorSpaceInfo HDR_INFO = {0, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102}; + bool ret = OH_ImageProcessing_IsMetadataGenerationSupported(&HDR_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_TRUE(ret); + } else { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0240 + * @tc.name : call OH_ImageProcessing_Create + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0240, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(nullptr, INT_MAX); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0250 + * @tc.name : call OH_ImageProcessing_Create + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0250, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(nullptr, + IMAGE_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0260 + * @tc.name : call OH_ImageProcessing_Create + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0260, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, INT_MAX); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0270 + * @tc.name : call OH_ImageProcessing_Create + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0270, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); + } + 
OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0280 + * @tc.name : call OH_ImageProcessing_Destroy + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0280, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Destroy(nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0290 + * @tc.name : call OH_ImageProcessing_Destroy + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0290, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0450 + * @tc.name : call OH_ImageProcessing_ConvertColorSpace + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0450, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_ConvertColorSpace(nullptr, + nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0460 + * @tc.name : call OH_ImageProcessing_ConvertColorSpace + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0460, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, nullptr, nullptr); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0470 + * @tc.name : call OH_ImageProcessing_ConvertColorSpace + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0470, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_ConvertColorSpace(nullptr, pixelMap_RGBA, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0480 + * @tc.name : call OH_ImageProcessing_ConvertColorSpace + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0480, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_ConvertColorSpace( + nullptr, nullptr, pixelMap_RGBA); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, 
IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0490 + * @tc.name : call OH_ImageProcessing_ConvertColorSpace + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0490, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, nullptr, pixelMap_RGBA); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0500 + * @tc.name : call OH_ImageProcessing_ConvertColorSpace + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0500, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_ConvertColorSpace( + nullptr, src_pixelMap_RGBA, dst_pixelMap_RGBA); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } + OH_PixelmapNative_Release(src_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0510 + * @tc.name : call OH_ImageProcessing_ConvertColorSpace when the color space + * of the image is unsupported.
+ * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0510, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, pixelMap_RGBA, pixelMap_BGRA); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0520 + * @tc.name : call OH_ImageProcessing_ConvertColorSpace + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0520, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src_pixelMap_RGBA, dst_pixelMap_RGBA); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); + } + OH_ImageProcessing_Destroy(imageProcessor); + OH_PixelmapNative_Release(src_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0530 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0530, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Compose(nullptr, nullptr, nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0540 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0540, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + ret = OH_ImageProcessing_Compose(imageProcessor, nullptr, nullptr, nullptr); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0550 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0550, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, 
&src_pixelMap_RGBA); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Compose(nullptr, src_pixelMap_RGBA, nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } + OH_PixelmapNative_Release(src_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0560 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0560, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_PixelmapNative *src_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_gain_pixelMap_RGBA); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Compose(nullptr, nullptr, src_gain_pixelMap_RGBA, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } + OH_PixelmapNative_Release(src_gain_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0570 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0570, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Compose(nullptr, nullptr, nullptr, dst_pixelMap_RGBA); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } + OH_PixelmapNative_Release(dst_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0580 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0580, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + ret = OH_ImageProcessing_Compose(imageProcessor, src_pixelMap_RGBA, nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); + OH_PixelmapNative_Release(src_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0590 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0590, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_gain_pixelMap_RGBA); + ret = 
OH_ImageProcessing_Compose(imageProcessor, nullptr, src_gain_pixelMap_RGBA, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); + OH_PixelmapNative_Release(src_gain_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0600 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0600, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + ret = OH_ImageProcessing_Compose(imageProcessor, nullptr, nullptr, dst_pixelMap_RGBA); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0610 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0610, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + OH_PixelmapNative *src_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_gain_pixelMap_RGBA); + ret = OH_ImageProcessing_Compose(imageProcessor, src_pixelMap_RGBA, src_gain_pixelMap_RGBA, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); + OH_PixelmapNative_Release(src_pixelMap_RGBA); + OH_PixelmapNative_Release(src_gain_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0620 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0620, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_gain_pixelMap_RGBA); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + ret = OH_ImageProcessing_Compose(imageProcessor, nullptr, src_gain_pixelMap_RGBA, dst_pixelMap_RGBA); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + 
EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); + OH_PixelmapNative_Release(src_gain_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0630 + * @tc.name : call OH_ImageProcessing_Compose when the color space + * of the image is unsupported. + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0630, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + OH_PixelmapNative *src_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_gain_pixelMap_RGBA); + OH_PixelmapNative *dst_pixelMap_BGRA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_BGRA, &dst_pixelMap_BGRA); + ret = OH_ImageProcessing_Compose(imageProcessor, src_pixelMap_RGBA, src_gain_pixelMap_RGBA, dst_pixelMap_BGRA); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING); + OH_PixelmapNative_Release(src_pixelMap_RGBA); + OH_PixelmapNative_Release(src_gain_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_pixelMap_BGRA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0640 + * @tc.name : call OH_ImageProcessing_Compose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0640, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_COMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + OH_PixelmapNative *src_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_gain_pixelMap_RGBA); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + ret = OH_ImageProcessing_Compose(imageProcessor, src_pixelMap_RGBA, src_gain_pixelMap_RGBA, dst_pixelMap_RGBA); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative_Release(src_pixelMap_RGBA); + OH_PixelmapNative_Release(src_gain_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0650 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0650, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Decompose(nullptr, nullptr, nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } +} + +/** + * @tc.number :
VPE_IMAGE_API_TEST_0660 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0660, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_DECOMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + ret = OH_ImageProcessing_Decompose(imageProcessor, nullptr, nullptr, nullptr); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0670 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0670, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Decompose(nullptr, src_pixelMap_RGBA, nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } + OH_PixelmapNative_Release(src_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0680 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0680, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Decompose(nullptr, nullptr, dst_pixelMap_RGBA, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } + OH_PixelmapNative_Release(dst_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0690 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0690, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_PixelmapNative *dst_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_gain_pixelMap_RGBA); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Decompose(nullptr, nullptr, nullptr, dst_gain_pixelMap_RGBA); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } + OH_PixelmapNative_Release(dst_gain_pixelMap_RGBA); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0700 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0700, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_DECOMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + 
OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + ret = OH_ImageProcessing_Decompose(imageProcessor, src_pixelMap_RGBA, nullptr, nullptr); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + OH_PixelmapNative_Release(src_pixelMap_RGBA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0710 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0710, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_DECOMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + ret = OH_ImageProcessing_Decompose(imageProcessor, nullptr, dst_pixelMap_RGBA, nullptr); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0720 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0720, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_DECOMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *dst_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_gain_pixelMap_RGBA); + ret = OH_ImageProcessing_Decompose(imageProcessor, nullptr, nullptr, dst_gain_pixelMap_RGBA); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + OH_PixelmapNative_Release(dst_gain_pixelMap_RGBA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0730 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0730, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_DECOMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + ret = OH_ImageProcessing_Decompose(imageProcessor, src_pixelMap_RGBA, dst_pixelMap_RGBA, nullptr); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + OH_PixelmapNative_Release(src_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0740 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ 
+HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0740, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_DECOMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + OH_PixelmapNative *dst_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_gain_pixelMap_RGBA); + ret = OH_ImageProcessing_Decompose(imageProcessor, nullptr, dst_pixelMap_RGBA, dst_gain_pixelMap_RGBA); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_gain_pixelMap_RGBA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0750 + * @tc.name : call OH_ImageProcessing_Decompose when the color space + * of the image is unsupported. + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0750, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_DECOMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + OH_PixelmapNative *dst_gain_pixelMap_BGRA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_BGRA, &dst_gain_pixelMap_BGRA); + ret = OH_ImageProcessing_Decompose(imageProcessor, src_pixelMap_RGBA, dst_pixelMap_RGBA, dst_gain_pixelMap_BGRA); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING); + OH_PixelmapNative_Release(src_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_gain_pixelMap_BGRA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0760 + * @tc.name : call OH_ImageProcessing_Decompose + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0760, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_DECOMPOSITION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + OH_PixelmapNative *dst_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_gain_pixelMap_RGBA); + ret = OH_ImageProcessing_Decompose(imageProcessor, src_pixelMap_RGBA, dst_pixelMap_RGBA, dst_gain_pixelMap_RGBA); + EXPECT_EQ(ret,
IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative_Release(src_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_gain_pixelMap_RGBA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0770 + * @tc.name : call OH_ImageProcessing_GenerateMetadata + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0770, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_GenerateMetadata(nullptr, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + EXPECT_NE(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0780 + * @tc.name : call OH_ImageProcessing_GenerateMetadata + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0780, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + ret = OH_ImageProcessing_GenerateMetadata(imageProcessor, nullptr); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0790 + * @tc.name : call OH_ImageProcessing_GenerateMetadata when the color space + * of the image is unsupported. + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0790, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_BGRA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_BGRA, &src_pixelMap_BGRA); + ret = OH_ImageProcessing_GenerateMetadata(imageProcessor, src_pixelMap_BGRA); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING); + OH_PixelmapNative_Release(src_pixelMap_BGRA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0800 + * @tc.name : call OH_ImageProcessing_GenerateMetadata + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0800, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* imageProcessor = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&imageProcessor, + IMAGE_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + ret = OH_ImageProcessing_GenerateMetadata(imageProcessor, src_pixelMap_RGBA); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative_Release(src_pixelMap_RGBA); + } + OH_ImageProcessing_Destroy(imageProcessor); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0890 + * @tc.name : call OH_ImageProcessing_IsColorSpaceConversionSupported + * when the color
space conversion is unsupported. + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0890, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsColorSpaceConversionSupported(&UNSUPPORTED_INFO, &DST_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0900 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * when the color space conversion is unsupported. + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0900, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsCompositionSupported(&UNSUPPORTED_INFO, &SRC_GAIN_INFO, &DST_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0910 + * @tc.name : call OH_ImageProcessing_IsDecompositionSupported + * when the color space conversion is unsupported. + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0910, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsDecompositionSupported(&UNSUPPORTED_INFO, &DST_INFO, &DST_GAIN_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0920 + * @tc.name : call OH_ImageProcessing_IsMetadataGenerationSupported + * when the color space conversion is unsupported. + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0920, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + bool ret = OH_ImageProcessing_IsMetadataGenerationSupported(&UNSUPPORTED_INFO); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0930 + * @tc.name : call OH_ImageProcessing_Compose/Decompose/GenerateMetadata with an instance of the wrong processing type + * @tc.desc : function test + */ +HWTEST_F(VpeImageApiTest, VPE_IMAGE_API_TEST_0930, TestSize.Level0) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_PixelmapNative *src_pixelMap_RGBA = nullptr; + OH_PixelmapNative *src_gain_pixelMap_RGBA = nullptr; + OH_PixelmapNative *dst_pixelMap_RGBA = nullptr; + OH_PixelmapNative *dst_gain_pixelMap_RGBA = nullptr; + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_pixelMap_RGBA); + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_pixelMap_RGBA); + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &src_gain_pixelMap_RGBA); + ret = OH_ImageProcessing_Compose(imageProcessor, src_pixelMap_RGBA, src_gain_pixelMap_RGBA, dst_pixelMap_RGBA); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + OH_PixelmapNative_CreatePixelmap(g_pixData, PIX_SIZE, g_createOpts_RGBA, &dst_gain_pixelMap_RGBA); + ret = OH_ImageProcessing_Decompose(imageProcessor, src_pixelMap_RGBA, dst_pixelMap_RGBA, dst_gain_pixelMap_RGBA); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + ret = OH_ImageProcessing_GenerateMetadata(imageProcessor, src_pixelMap_RGBA); + EXPECT_EQ(ret,
IMAGE_PROCESSING_ERROR_INVALID_INSTANCE); + } + OH_PixelmapNative_Release(src_pixelMap_RGBA); + OH_PixelmapNative_Release(src_gain_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_pixelMap_RGBA); + OH_PixelmapNative_Release(dst_gain_pixelMap_RGBA); +} +} \ No newline at end of file diff --git a/test/ndk/moduletest/image/capability_test.cpp b/test/ndk/moduletest/image/capability_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b51ded64bbfcf6e95d48562ab1a7067c559a7960 --- /dev/null +++ b/test/ndk/moduletest/image/capability_test.cpp @@ -0,0 +1,829 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include "gtest/gtest.h" +#include "video_processing.h" +#include "image_processing_types.h" +#include "image_processing.h" +#include "enum_list.h" +#include "image/pixelmap_native.h" + +using namespace std; +// using namespace OHOS; +// using namespace OHOS::Media; +using namespace testing::ext; +namespace { +class VpeVideoCapTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; +void VpeVideoCapTest::SetUpTestCase() +{ + OH_ImageProcessing_InitializeEnvironment(); +} +void VpeVideoCapTest::TearDownTestCase() +{ + OH_ImageProcessing_DeinitializeEnvironment(); +} +void VpeVideoCapTest::SetUp() {} +void VpeVideoCapTest::TearDown() {} +} +namespace +{ +/** + * @tc.number : COLORSPACE_SUPPORT_001 + * @tc.name : adobergb to srgb + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_001, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = NONE; + inputFormat.colorSpace = ADOBE_RGB_1998; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + outputFormat.metadataType = NONE; + outputFormat.colorSpace = SRGB; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } +} + + +/** + * @tc.number : COLORSPACE_SUPPORT_002 + * @tc.name : adobergb to display p3 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_002, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = NONE; + inputFormat.colorSpace = ADOBE_RGB_1998; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + outputFormat.metadataType = NONE; + outputFormat.colorSpace = DISPLAY_P3; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, 
OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } +} + +/** + * @tc.number : COLORSPACE_SUPPORT_003 + * @tc.name : srgb to display p3 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_003, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = NONE; + inputFormat.colorSpace = SRGB; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + outputFormat.metadataType = NONE; + outputFormat.colorSpace = DISPLAY_P3; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } +} + +/** + * @tc.number : COLORSPACE_SUPPORT_004 + * @tc.name : display p3 to srgb + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_004, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = NONE; + inputFormat.colorSpace = DISPLAY_P3; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + outputFormat.metadataType = NONE; + outputFormat.colorSpace = SRGB; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } +} + +/** + * @tc.number : COLORSPACE_SUPPORT_005 + * @tc.name : srgb to hlg rgba + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_005, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = NONE; + inputFormat.colorSpace = SRGB; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_HLG; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_1010102; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } +} + +/** + * @tc.number : COLORSPACE_SUPPORT_006 + * @tc.name : srgb to hlg p010 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_006, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = NONE; + inputFormat.colorSpace = SRGB; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_HLG; + outputFormat.pixelFormat = PIXEL_FORMAT_YCBCR_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } +} + + +/** + * @tc.number : COLORSPACE_SUPPORT_007 + * @tc.name : p3 to hlg rgba + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_007, TestSize.Level2) +{ + 
ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = NONE; + inputFormat.colorSpace = DISPLAY_P3; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_HLG; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_1010102; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } +} + +/** + * @tc.number : COLORSPACE_SUPPORT_008 + * @tc.name : p3 to hlg p010 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_008, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = NONE; + inputFormat.colorSpace = DISPLAY_P3; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_HLG; + outputFormat.pixelFormat = PIXEL_FORMAT_YCBCR_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0010 + * @tc.name : hlg 10bit rgba to p3 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0010, TestSize.Level0) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_HLG, + PIXEL_FORMAT_RGBA_1010102}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0020 + * @tc.name : hlg 10bit rgba to srgb 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0020, TestSize.Level1) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_HLG, + PIXEL_FORMAT_RGBA_1010102}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0030 + * @tc.name : hlg 10bit nv12 to p3 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0030, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_HLG, + PIXEL_FORMAT_YCBCR_P010}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0040 + * @tc.name : hlg 10bit nv12 to hlg 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0040, 
TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_HLG, + PIXEL_FORMAT_YCBCR_P010}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0050 + * @tc.name : hlg 10bit nv21 to P3 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0050, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_HLG, + PIXEL_FORMAT_YCRCB_P010}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0060 + * @tc.name : hlg 10bit nv21 to hlg 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0060, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_HLG, + PIXEL_FORMAT_YCRCB_P010}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0070 + * @tc.name : pq 10bit rgba to p3 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0070, TestSize.Level0) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_PQ, + PIXEL_FORMAT_RGBA_1010102}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0080 + * @tc.name : pq 10bit rgba to srgb 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0080, TestSize.Level1) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_PQ, + PIXEL_FORMAT_RGBA_1010102}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0090 + * @tc.name : pq 10bit nv12 to P3 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0090, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_PQ, + PIXEL_FORMAT_YCBCR_P010}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = 
{GAINMAP, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0100 + * @tc.name : pq 10bit nv12 to hlg 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0100, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_PQ, + PIXEL_FORMAT_YCBCR_P010}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0110 + * @tc.name : pq 10bit nv21 to p3 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0110, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_PQ, + PIXEL_FORMAT_YCRCB_P010}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_P3, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : COLORSPACE_DECOMPOSE_0120 + * @tc.name : pq 10bit nv21 to hlg 8bit rgba + * + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_DECOMPOSE_0120, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo sourceImageInfo = {ALTERNATE, + BT2020_PQ, + PIXEL_FORMAT_YCRCB_P010}; + ImageProcessing_ColorSpaceInfo destinationImageInfo = {BASE, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ImageProcessing_ColorSpaceInfo destinationGainmapInfo = {GAINMAP, + DISPLAY_SRGB, + PIXEL_FORMAT_RGBA_8888}; + ASSERT_TRUE(OH_ImageProcessing_IsDecompositionSupported(&sourceImageInfo, &destinationImageInfo, &destinationGainmapInfo)); +} + +/** + * @tc.number : METADATAGENERATE_SUPPORT_001 + * @tc.name : hlg rgba1010102 metadata generate + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, METADATAGENERATE_SUPPORT_001, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + inputFormat.metadataType = ALTERNATE; + inputFormat.colorSpace = BT2020_HLG; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_1010102; + if (!access("/system/lib64/media/", 0)) { + // ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } +} + +/** + * @tc.number : METADATAGENERATE_SUPPORT_002 + * @tc.name : hlg p010 metadata generate + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, METADATAGENERATE_SUPPORT_002, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + inputFormat.metadataType = ALTERNATE; + inputFormat.colorSpace = BT2020_HLG; + inputFormat.pixelFormat = PIXEL_FORMAT_YCBCR_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } +} + +/** + * @tc.number : METADATAGENERATE_SUPPORT_003 + * @tc.name : hlg p010_NV21 metadata generate + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, METADATAGENERATE_SUPPORT_003, 
TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + inputFormat.metadataType = ALTERNATE; + inputFormat.colorSpace = BT2020_HLG; + inputFormat.pixelFormat = PIXEL_FORMAT_YCRCB_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } +} + +/** + * @tc.number : METADATAGENERATE_SUPPORT_004 + * @tc.name : pq rgba1010102 metadata generate + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, METADATAGENERATE_SUPPORT_004, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + inputFormat.metadataType = ALTERNATE; + inputFormat.colorSpace = BT2020_PQ; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_1010102; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } +} + +/** + * @tc.number : METADATAGENERATE_SUPPORT_005 + * @tc.name : pq p010 metadata generate + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, METADATAGENERATE_SUPPORT_005, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + inputFormat.metadataType = ALTERNATE; + inputFormat.colorSpace = BT2020_PQ; + inputFormat.pixelFormat = PIXEL_FORMAT_YCBCR_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } +} + +/** + * @tc.number : METADATAGENERATE_SUPPORT_006 + * @tc.name : pq p010_NV21 metadata generate + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, METADATAGENERATE_SUPPORT_006, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + inputFormat.metadataType = ALTERNATE; + inputFormat.colorSpace = BT2020_PQ; + inputFormat.pixelFormat = PIXEL_FORMAT_YCRCB_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&inputFormat)); + } +} + +/** + * @tc.number : Composition_SUPPORT_001 + * @tc.name : srgb + srgb gainmap to PQ P010 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, Composition_SUPPORT_001, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo inputGainmapFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + + inputFormat.metadataType = BASE; + inputFormat.colorSpace = SRGB; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + inputGainmapFormat.metadataType = GAINMAP; + inputGainmapFormat.colorSpace = SRGB; + inputGainmapFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_PQ; + outputFormat.pixelFormat = PIXEL_FORMAT_YCBCR_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } +} + +/** + * @tc.number : Composition_SUPPORT_002 + * @tc.name : p3 + p3 gainmap to PQ P010 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, Composition_SUPPORT_002, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + 
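// The three descriptors filled in below model the dual-layer HDR case used throughout this group: + // inputFormat is the SDR base image, inputGainmapFormat the gainmap, and outputFormat the composed + // HDR rendition, hence the BASE/GAINMAP/ALTERNATE metadata types. + 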
ImageProcessing_ColorSpaceInfo inputGainmapFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + + inputFormat.metadataType = BASE; + inputFormat.colorSpace = DISPLAY_P3; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + inputGainmapFormat.metadataType = GAINMAP; + inputGainmapFormat.colorSpace = DISPLAY_P3; + inputGainmapFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_PQ; + outputFormat.pixelFormat = PIXEL_FORMAT_YCBCR_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } +} + +/** + * @tc.number : Composition_SUPPORT_005 + * @tc.name : srgb + srgb gainmap to PQ rgba1010102 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, Composition_SUPPORT_005, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo inputGainmapFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + + inputFormat.metadataType = BASE; + inputFormat.colorSpace = SRGB; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + inputGainmapFormat.metadataType = GAINMAP; + inputGainmapFormat.colorSpace = SRGB; + inputGainmapFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_PQ; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_1010102; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } +} + +/** + * @tc.number : Composition_SUPPORT_006 + * @tc.name : p3 + p3 gainmap to PQ rgba1010102 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, Composition_SUPPORT_006, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo inputGainmapFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + + inputFormat.metadataType = BASE; + inputFormat.colorSpace = DISPLAY_P3; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + inputGainmapFormat.metadataType = GAINMAP; + inputGainmapFormat.colorSpace = DISPLAY_P3; + inputGainmapFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_PQ; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_1010102; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } +} + +/** + * @tc.number : Composition_SUPPORT_007 + * @tc.name : srgb + srgb gainmap to HLG P010 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, Composition_SUPPORT_007, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo inputGainmapFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + + inputFormat.metadataType = BASE; + inputFormat.colorSpace = SRGB; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + inputGainmapFormat.metadataType = GAINMAP; + inputGainmapFormat.colorSpace = SRGB; + inputGainmapFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + outputFormat.metadataType = ALTERNATE; + 
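// ALTERNATE denotes the composed HDR output layer, complementing the BASE and GAINMAP inputs above. + 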
outputFormat.colorSpace = BT2020_HLG; + outputFormat.pixelFormat = PIXEL_FORMAT_YCBCR_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } +} + +/** + * @tc.number : Composition_SUPPORT_008 + * @tc.name : p3 + p3 gainmap to HLG P010 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, Composition_SUPPORT_008, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo inputGainmapFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + + inputFormat.metadataType = BASE; + inputFormat.colorSpace = DISPLAY_P3; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + inputGainmapFormat.metadataType = GAINMAP; + inputGainmapFormat.colorSpace = DISPLAY_P3; + inputGainmapFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_HLG; + outputFormat.pixelFormat = PIXEL_FORMAT_YCBCR_P010; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } +} + +/** + * @tc.number : Composition_SUPPORT_011 + * @tc.name : srgb + srgb gainmap to HLG rgba1010102 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, Composition_SUPPORT_011, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo inputGainmapFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + + inputFormat.metadataType = BASE; + inputFormat.colorSpace = SRGB; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + inputGainmapFormat.metadataType = GAINMAP; + inputGainmapFormat.colorSpace = SRGB; + inputGainmapFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_HLG; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_1010102; + + ASSERT_EQ(true, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); +} + +/** + * @tc.number : Composition_SUPPORT_012 + * @tc.name : p3 + p3 gainmap to HLG rgba1010102 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, Composition_SUPPORT_012, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo inputGainmapFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + + inputFormat.metadataType = BASE; + inputFormat.colorSpace = DISPLAY_P3; + inputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + inputGainmapFormat.metadataType = GAINMAP; + inputGainmapFormat.colorSpace = DISPLAY_P3; + inputGainmapFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + + outputFormat.metadataType = ALTERNATE; + outputFormat.colorSpace = BT2020_HLG; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_1010102; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsCompositionSupported(&inputFormat, &inputGainmapFormat, &outputFormat)); + } +} + +/** + * @tc.number : COLORSPACE_SUPPORT_013 + * @tc.name : srgb to display p3 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_013, TestSize.Level2) +{ + 
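// As in the capability cases above, the expected result depends on whether /system/lib64/media/ exists on the + // device; the access() probe below is used as the indicator that the processing libraries are installed. + 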
ImageProcessing_ColorSpaceInfo inputFormat; + ImageProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = NONE; + inputFormat.colorSpace = SRGB; + inputFormat.pixelFormat = PIXEL_FORMAT_NV21; + outputFormat.metadataType = NONE; + outputFormat.colorSpace = DISPLAY_P3; + outputFormat.pixelFormat = PIXEL_FORMAT_RGBA_8888; + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } +} + +} // namespace \ No newline at end of file diff --git a/test/ndk/moduletest/image/enum_list.h b/test/ndk/moduletest/image/enum_list.h new file mode 100644 index 0000000000000000000000000000000000000000..3f6aca44efc2919ccbf2d426532c2d48c152734b --- /dev/null +++ b/test/ndk/moduletest/image/enum_list.h @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + enum COLORSPACE{ + UNKNOWN = 0, + ADOBE_RGB_1998 = 1, + DCI_P3 = 2, + DISPLAY_P3 = 3, + SRGB = 4, + BT709 = 6, + BT601_EBU = 7, + BT601_SMPTE_C = 8, + BT2020_HLG = 9, + BT2020_PQ = 10, + P3_HLG = 11, + P3_PQ = 12, + ADOBE_RGB_1998_LIMIT = 13, + DISPLAY_P3_LIMIT = 14, + SRGB_LIMIT = 15, + BT709_LIMIT = 16, + BT601_EBU_LIMIT = 17, + BT601_SMPTE_C_LIMIT = 18, + BT2020_HLG_LIMIT = 19, + BT2020_PQ_LIMIT = 20, + P3_HLG_LIMIT = 21, + P3_PQ_LIMIT = 22, + LINEAR_P3 = 23, + LINEAR_SRGB = 24, + LINEAR_BT709 = LINEAR_SRGB, + LINEAR_BT2020 = 25, + DISPLAY_SRGB = SRGB, + DISPLAY_P3_SRGB = DISPLAY_P3, + DISPLAY_P3_HLG = P3_HLG, + DISPLAY_P3_PQ = P3_PQ, + CUSTOM = 5 + }; + + enum HdrMetadataType { + NONE = 0, + BASE = 1, + GAINMAP = 2, + ALTERNATE = 3 + }; \ No newline at end of file diff --git a/test/ndk/moduletest/image/func_test.cpp b/test/ndk/moduletest/image/func_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..191e12aafe4bf96791b6f544061192652ad828d0 --- /dev/null +++ b/test/ndk/moduletest/image/func_test.cpp @@ -0,0 +1,4228 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include <fstream> +#include <memory> +#include <string> +#include "gtest/gtest.h" +#include "image_processing.h" +#include "image/pixelmap_native.h" + +#define NV12_SIZE(x, y) ((x) * (y) * 3 >> 1) +#define P010_SIZE(x, y) ((x) * (y) * 3) +#define RGBA_SIZE(x, y) ((x) * (y) * 4) + +using namespace std; +using namespace testing::ext; +namespace { +constexpr int32_t DEFAULT_WIDTH = 3840; +constexpr int32_t DEFAULT_HEIGHT = 2160; + 
+static void CreatePixelmap(OH_PixelmapNative **pixelMap, int32_t width, int32_t height, int format) +{ + OH_Pixelmap_InitializationOptions *options = nullptr; + (void)OH_PixelmapInitializationOptions_Create(&options); + (void)OH_PixelmapInitializationOptions_SetWidth(options, width); + (void)OH_PixelmapInitializationOptions_SetHeight(options, height); + (void)OH_PixelmapInitializationOptions_SetPixelFormat(options, format); + (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelMap); +} + 
+// Loads raw pixel data from filePath into pixelBuffer; the read size is derived from pixFmt. +static void WritePixels(std::string filePath, OH_PixelmapNative *pixelBuffer, int32_t width, int32_t height, int32_t pixFmt) +{ + auto file = std::make_unique<std::ifstream>(filePath); + int32_t size = 0; + switch (pixFmt) { + case PIXEL_FORMAT_RGBA_8888: + size = RGBA_SIZE(width, height); + break; + case PIXEL_FORMAT_NV12: + case PIXEL_FORMAT_NV21: + size = NV12_SIZE(width, height); + break; + case PIXEL_FORMAT_YCBCR_P010: + size = P010_SIZE(width, height); + break; + } + uint8_t *data = new uint8_t[size]; + file->read(reinterpret_cast<char *>(data), size); + OH_PixelmapNative_WritePixels(pixelBuffer, data, size); + delete[] data; +} + 
+// Dumps the pixel data of pixelBuffer into filePath for offline inspection. +static void ReadPixels(std::string filePath, OH_PixelmapNative *pixelBuffer, int32_t size) +{ + auto file = std::make_unique<std::ofstream>(filePath); + uint8_t *data = new uint8_t[size]; + size_t pixSize = size; + OH_PixelmapNative_ReadPixels(pixelBuffer, data, &pixSize); + file->write(reinterpret_cast<char *>(data), size); + delete[] data; +} + 
+class VpeImageFuncTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; + +void VpeImageFuncTest::SetUpTestCase() +{ + OH_ImageProcessing_InitializeEnvironment(); +} +void VpeImageFuncTest::TearDownTestCase() +{ + OH_ImageProcessing_DeinitializeEnvironment(); +} +void VpeImageFuncTest::SetUp() {} +void VpeImageFuncTest::TearDown() {} +} + +namespace { 
+/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0010 + * @tc.name : test colorspace conversion, srgb nv12 convert to srgb nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0010, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/srgb_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2srgb_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, 
dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0020 + * @tc.name : test colorspace conversion, srgb nv12 convert to srgb rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0020, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/srgb_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2srgb_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0030 + * @tc.name : test colorspace conversion, srgb nv12 convert to displayP3 nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0030, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/srgb_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2p3_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0040 + * @tc.name : test colorspace conversion, srgb nv12 convert to displayP3 rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0040, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/srgb_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2p3_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = 
OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0050 + * @tc.name : test colorspace conversion, srgb nv21 convert to srgb nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0050, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/srgb_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2srgb_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0060 + * @tc.name : test colorspace conversion, srgb nv21 convert to srgb rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0060, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/srgb_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2srgb_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0070 + * @tc.name : test colorspace conversion, srgb nv21 convert to displayP3 nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0070, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/srgb_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2p3_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + 
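// Only the processor is destroyed here; the pixelmaps created above are not released explicitly + // (OH_PixelmapNative_Release is not called anywhere in these function tests). + 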
EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0080 + * @tc.name : test colorspace conversion, srgb nv21 convert to displayP3 rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0080, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/srgb_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2p3_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0090 + * @tc.name : test colorspace conversion, srgb rgba convert to srgb nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0090, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/srgb_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2srgb_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0100 + * @tc.name : test colorspace conversion, srgb rgba convert to srgb rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0100, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/srgb_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2srgb_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, 
IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0110 + * @tc.name : test colorspace conversion, srgb nv21 convert to displayP3 nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0110, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/srgb_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2p3_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0120 + * @tc.name : test colorspace conversion, srgb nv21 convert to displayP3 rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0120, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/srgb_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/srgb2p3_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0130 + * @tc.name : test colorspace conversion, p3 nv12 convert to srgb nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0130, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/p3_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p32srgb_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * 
@tc.number : VPE_IMAGE_FUNC_TEST_0140 + * @tc.name : test colorspace conversion, p3 nv12 convert to srgb rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0140, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/p3_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p32srgb_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0150 + * @tc.name : test colorspace conversion, p3 nv12 convert to displayP3 nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0150, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/p3_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p3top3_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0160 + * @tc.name : test colorspace conversion, p3 nv12 convert to displayP3 rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0160, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/p3_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p3top3_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0170 + * @tc.name : 
test colorspace conversion, p3 nv21 convert to srgb nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0170, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/p3_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p32srgb_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0180 + * @tc.name : test colorspace conversion, p3 nv21 convert to srgb rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0180, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/p3_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p32srgb_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0190 + * @tc.name : test colorspace conversion, p3 nv21 convert to displayP3 nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0190, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/p3_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p3top3_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0200 + * @tc.name : test colorspace conversion, p3 nv21 convert to displayP3 rgba + * 
@tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0200, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/p3_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p3top3_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0210 + * @tc.name : test colorspace conversion, p3 rgba convert to srgb nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0210, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/p3_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p32srgb_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0220 + * @tc.name : test colorspace conversion, p3 rgba convert to srgb rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0220, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/p3_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p32srgb_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0230 + * @tc.name : test colorspace conversion, p3 rgba convert to displayP3 nv12 + * @tc.desc : function test + */ 
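+/* For reference only: these function tests call OH_ImageProcessing_ConvertColorSpace directly, while support for a + * given pair can be probed the way the capability tests above do, using the same brace-init order + * (metadataType, colorSpace, pixelFormat). An illustrative pairing mirroring the case below: + *   ImageProcessing_ColorSpaceInfo in  = {NONE, DISPLAY_P3, PIXEL_FORMAT_RGBA_8888}; + *   ImageProcessing_ColorSpaceInfo out = {NONE, DISPLAY_P3, PIXEL_FORMAT_NV12}; + *   bool supported = OH_ImageProcessing_IsColorSpaceConversionSupported(&in, &out); + */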
+HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0230, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/p3_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p3top3_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0240 + * @tc.name : test colorspace conversion, p3 rgba convert to displayP3 rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0240, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/p3_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/p3top3_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0250 + * @tc.name : test colorspace conversion, adobeRGB nv12 convert to srgb nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0250, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/adobeRGB_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2srgb_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0260 + * @tc.name : test colorspace conversion, adobeRGB nv12 convert to srgb rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, 
VPE_IMAGE_FUNC_TEST_0260, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/adobeRGB_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2srgb_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0270 + * @tc.name : test colorspace conversion, adobeRGB nv12 convert to displayP3 nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0270, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/adobeRGB_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2p3_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0280 + * @tc.name : test colorspace conversion, adobeRGB nv12 convert to displayP3 rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0280, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/adobeRGB_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2p3_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0290 + * @tc.name : test colorspace conversion, adobeRGB nv21 convert to srgb nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, 
VPE_IMAGE_FUNC_TEST_0290, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/adobeRGB_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2srgb_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0300 + * @tc.name : test colorspace conversion, adobeRGB nv21 convert to srgb rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0300, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/adobeRGB_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2srgb_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0310 + * @tc.name : test colorspace conversion, adobeRGB nv21 convert to displayP3 nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0310, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/adobeRGB_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2p3_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0320 + * @tc.name : test colorspace conversion, adobeRGB nv21 convert to displayP3 rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, 
VPE_IMAGE_FUNC_TEST_0320, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/adobeRGB_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2p3_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0330 + * @tc.name : test colorspace conversion, adobeRGB rgba convert to srgb nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0330, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/adobeRGB_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2srgb_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0340 + * @tc.name : test colorspace conversion, adobeRGB rgba convert to srgb rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0340, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/adobeRGB_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2srgb_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0350 + * @tc.name : test colorspace conversion, adobeRGB nv21 convert to displayP3 nv12 + * @tc.desc : function test + */ 
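+// Note: the next two cases read adobeRGB_rgba.yuv into an RGBA_8888 source pixelmap even though their @tc.name comments still refer to nv21.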
+HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0350, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/adobeRGB_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2p3_nv12_out.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0360 + * @tc.name : test colorspace conversion, adobeRGB nv21 convert to displayP3 rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0360, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/adobeRGB_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/adobeRGB2p3_rgba_out.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0370 + * @tc.name : test image composition, new vivid nv12 with gainmap nv12 ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0370, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *gainMap = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv12_gainmap_nv12_out_pq_nv12.yuv"; + int32_t outSize = 
P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0380 + * @tc.name : test image composition, new vivid nv12 with gainmap nv12 ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0380, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *gainMap = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv12_gainmap_nv12_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0390 + * @tc.name : test image composition, new vivid nv12 with gainmap nv12 ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0390, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *gainMap = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv12_gainmap_nv12_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0400 + * @tc.name : test image composition, new vivid nv12 with gainmap nv12 ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0400, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = 
OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *gainMap = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv12_gainmap_nv12_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0410 + * @tc.name : test image composition, new vivid nv12 with gainmap rgba ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0410, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *gainMap = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv12_gainmap_rgba_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0420 + * @tc.name : test image composition, new vivid nv12 with gainmap rgba ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0420, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *gainMap = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = 
"/data/test/media/new_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv12_gainmap_rgba_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0430 + * @tc.name : test image composition, new vivid nv12 with gainmap rgba ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0430, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv12_gainmap_rgba_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0440 + * @tc.name : test image composition, new vivid nv12 with gainmap rgba ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0440, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = 
"/data/test/media/newvivid_nv12_gainmap_rgba_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0450 + * @tc.name : test image composition, new vivid nv21 with gainmap nv12 ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0450, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv21_gainmap_nv12_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0460 + * @tc.name : test image composition, new vivid nv21 with gainmap nv12 ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0460, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv21_gainmap_nv12_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0470 + * @tc.name : test image composition, new vivid nv21 with gainmap nv12 ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0470, TestSize.Level0) +{ + 
OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv21_gainmap_nv12_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0480 + * @tc.name : test image composition, new vivid nv21 with gainmap nv12 ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0480, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv21_gainmap_nv12_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0490 + * @tc.name : test image composition, new vivid nv21 with gainmap rgba ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0490, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, 
PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv21_gainmap_rgba_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0500 + * @tc.name : test image composition, new vivid nv21 with gainmap rgba ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0500, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv21_gainmap_rgba_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0510 + * @tc.name : test image composition, new vivid nv21 with gainmap rgba ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0510, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = 
"/data/test/media/newvivid_nv21_gainmap_rgba_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0520 + * @tc.name : test image composition, new vivid nv21 with gainmap rgba ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0520, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_nv21_gainmap_rgba_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0530 + * @tc.name : test image composition, new vivid rgba with gainmap nv12 ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0530, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_rgba_gainmap_nv12_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0540 + * @tc.name : test image composition, new vivid rgba with gainmap nv12 ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0540, 
TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_rgba_gainmap_nv12_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0550 + * @tc.name : test image composition, new vivid rgba with gainmap nv12 ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0550, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_rgba_gainmap_nv12_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0560 + * @tc.name : test image composition, new vivid rgba with gainmap nv12 ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0560, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + 
CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_rgba_gainmap_nv12_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0570 + * @tc.name : test image composition, new vivid rgba with gainmap rgba ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0570, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_rgba_gainmap_rgba_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0580 + * @tc.name : test image composition, new vivid rgba with gainmap rgba ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0580, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + 
EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_rgba_gainmap_rgba_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0590 + * @tc.name : test image composition, new vivid rgba with gainmap rgba ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0590, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/new_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_rgba_gainmap_rgba_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0600 + * @tc.name : test image composition, new vivid rgba with gainmap rgba ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0600, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/new_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/new_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/newvivid_rgba_gainmap_rgba_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0610 + * @tc.name : test image composition, old vivid nv12 with gainmap nv12 ,convert to pq 10bit nv12 + * 
@tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0610, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv12_gainmap_nv12_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0620 + * @tc.name : test image composition, old vivid nv12 with gainmap nv12 ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0620, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv12_gainmap_nv12_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0630 + * @tc.name : test image composition, old vivid nv12 with gainmap nv12 ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0630, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 
1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv12_gainmap_nv12_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0640 + * @tc.name : test image composition, old vivid nv12 with gainmap nv12 ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0640, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv12_gainmap_nv12_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0650 + * @tc.name : test image composition, old vivid nv12 with gainmap rgba ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0650, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); 
+ + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv12_gainmap_rgba_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0660 + * @tc.name : test image composition, old vivid nv12 with gainmap rgba ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0660, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv12_gainmap_rgba_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0670 + * @tc.name : test image composition, old vivid nv12 with gainmap rgba ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0670, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv12_gainmap_rgba_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0680 + * @tc.name : test image composition, old vivid nv12 with gainmap rgba ,convert to hlg 10bit rgba + * @tc.desc : function 
test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0680, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv12_gainmap_rgba_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0690 + * @tc.name : test image composition, old vivid nv21 with gainmap nv12 ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0690, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv21_gainmap_nv12_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0700 + * @tc.name : test image composition, old vivid nv21 with gainmap nv12 ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0700, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, 
DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv21_gainmap_nv12_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0710 + * @tc.name : test image composition, old vivid nv21 with gainmap nv12 ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0710, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv21_gainmap_nv12_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0720 + * @tc.name : test image composition, old vivid nv21 with gainmap nv12 ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0720, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + 
EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv21_gainmap_nv12_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0730 + * @tc.name : test image composition, old vivid nv21 with gainmap rgba ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0730, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv21_gainmap_rgba_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0740 + * @tc.name : test image composition, old vivid nv21 with gainmap rgba ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0740, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv21_gainmap_rgba_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0750 + * @tc.name : test image composition, old vivid nv21 with gainmap rgba ,convert to hlg 10bit nv12 + * @tc.desc : function test 
+ */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0750, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv21_gainmap_rgba_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0760 + * @tc.name : test image composition, old vivid nv21 with gainmap rgba ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0760, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_nv21_gainmap_rgba_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0770 + * @tc.name : test image composition, old vivid rgba with gainmap nv12 ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0770, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH 
>> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_rgba_gainmap_nv12_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0780 + * @tc.name : test image composition, old vivid rgba with gainmap nv12 ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0780, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_rgba_gainmap_nv12_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0790 + * @tc.name : test image composition, old vivid rgba with gainmap nv12 ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0790, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, 
src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_rgba_gainmap_nv12_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0800 + * @tc.name : test image composition, old vivid rgba with gainmap nv12 ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0800, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_rgba_gainmap_nv12_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0810 + * @tc.name : test image composition, old vivid rgba with gainmap rgba ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0810, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_rgba_gainmap_rgba_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0820 + * @tc.name : test image composition, old vivid rgba with gainmap rgba ,convert to pq 
10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0820, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_rgba_gainmap_rgba_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0830 + * @tc.name : test image composition, old vivid rgba with gainmap rgba ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0830, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/old_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_rgba_gainmap_rgba_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0840 + * @tc.name : test image composition, old vivid rgba with gainmap rgba ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0840, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, 
PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/old_vivid_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/old_vivid_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/oldvivid_rgba_gainmap_rgba_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0850 + * @tc.name : test image composition, iso nv12 with gainmap nv12 ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0850, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv12_gainmap_nv12_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0860 + * @tc.name : test image composition, iso nv12 with gainmap nv12 ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0860, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = 
OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv12_gainmap_nv12_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0870 + * @tc.name : test image composition, iso nv12 with gainmap nv12 ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0870, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv12_gainmap_nv12_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0880 + * @tc.name : test image composition, iso nv12 with gainmap nv12 ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0880, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv12_gainmap_nv12_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0890 + * @tc.name : test image composition, iso nv12 with gainmap rgba ,convert to pq 10bit nv12 + * @tc.desc : function test + */ 
+HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0890, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv12_gainmap_rgba_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0900 + * @tc.name : test image composition, iso nv12 with gainmap rgba ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0900, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv12_gainmap_rgba_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0910 + * @tc.name : test image composition, iso nv12 with gainmap rgba ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0910, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + 
CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv12_gainmap_rgba_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0920 + * @tc.name : test image composition, iso nv12 with gainmap rgba ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0920, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_nv12.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv12_gainmap_rgba_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0930 + * @tc.name : test image composition, iso nv21 with gainmap nv12 ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0930, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = 
"/data/test/media/iso_nv21_gainmap_nv12_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0940 + * @tc.name : test image composition, iso nv21 with gainmap nv12 ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0940, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv21_gainmap_nv12_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0950 + * @tc.name : test image composition, iso nv21 with gainmap nv12 ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0950, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv21_gainmap_nv12_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0960 + * @tc.name : test image composition, iso nv21 with gainmap nv12 ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0960, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t 
ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv21_gainmap_nv12_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0970 + * @tc.name : test image composition, iso nv21 with gainmap rgba ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0970, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv21_gainmap_rgba_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0980 + * @tc.name : test image composition, iso nv21 with gainmap rgba ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0980, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_nv21.yuv"; + std::string 
inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv21_gainmap_rgba_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_0990 + * @tc.name : test image composition, iso nv21 with gainmap rgba ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_0990, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv21_gainmap_rgba_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1000 + * @tc.name : test image composition, iso nv21 with gainmap rgba ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1000, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_nv21.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV21); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_nv21_gainmap_rgba_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = 
OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1010 + * @tc.name : test image composition, iso rgba with gainmap nv12 ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1010, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_rgba_gainmap_nv12_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1020 + * @tc.name : test image composition, iso rgba with gainmap nv12 ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1020, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_rgba_gainmap_nv12_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1030 + * @tc.name : test image composition, iso rgba with gainmap nv12 ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1030, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* 
src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_rgba_gainmap_nv12_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1040 + * @tc.name : test image composition, iso rgba with gainmap nv12 ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1040, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_rgba_gainmap_nv12_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1050 + * @tc.name : test image composition, iso rgba with gainmap rgba ,convert to pq 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1050, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, 
PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_rgba_gainmap_rgba_out_pq_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1060 + * @tc.name : test image composition, iso rgba with gainmap rgba ,convert to pq 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1060, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_rgba_gainmap_rgba_out_pq_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1070 + * @tc.name : test image composition, iso rgba with gainmap rgba ,convert to hlg 10bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1070, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + std::string inFile = "/data/test/media/iso_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_rgba_gainmap_rgba_out_hlg_nv12.yuv"; + int32_t outSize = P010_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + 
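Every case above sizes its output dump with the P010_SIZE, NV12_SIZE and RGBA_SIZE helpers, whose definitions sit outside this hunk in the shared test header. A minimal sketch of what they are assumed to compute for the 4:2:0 and RGBA layouts used here is shown below; the actual macros may differ.

// Hypothetical reconstruction of the size helpers used by these tests; the
// real definitions live in the shared test header and may not match exactly.
// NV12/NV21: 8-bit 4:2:0, one full luma plane plus half-resolution interleaved chroma.
#define NV12_SIZE(w, h) ((w) * (h) * 3 / 2)
// YCBCR/YCRCB_P010: 10-bit 4:2:0 stored in 16-bit containers, twice the NV12 footprint.
#define P010_SIZE(w, h) ((w) * (h) * 3)
// RGBA_8888 and RGBA_1010102: 4 bytes per pixel.
#define RGBA_SIZE(w, h) ((w) * (h) * 4)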
+/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1080 + * @tc.name : test image composition, iso rgba with gainmap rgba ,convert to hlg 10bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1080, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + std::string inFile = "/data/test/media/iso_rgba.yuv"; + std::string inGainmapFile = "/data/test/media/iso_gainmap_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + WritePixels(inGainmapFile, gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/iso_rgba_gainmap_rgba_out_hlg_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); ; + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1090 + * @tc.name : test image decomposition, pq p010 convert to nv12 + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1090, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + OH_PixelmapNative *gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_p010_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/pq_p010_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1100 + * @tc.name : test image decomposition, pq p010 convert to nv12 + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1100, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + OH_PixelmapNative 
*gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_p010_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/pq_p010_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1110 + * @tc.name : test image decomposition, pq p010 convert to rgba + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1110, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_p010_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/pq_p010_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1120 + * @tc.name : test image decomposition, pq p010 convert to rgba + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1120, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret,
IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_p010_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/pq_p010_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1130 + * @tc.name : test image decomposition, pq p010 convert to nv12 + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1130, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_p010_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/pq_p010_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1140 + * @tc.name : test image decomposition, pq p010 convert to nv12 + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1140, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_p010_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/pq_p010_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * 
@tc.number : VPE_IMAGE_FUNC_TEST_1150 + * @tc.name : test image decomposition, pq p010 convert to rgba + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1150, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_p010_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/pq_p010_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1160 + * @tc.name : test image decomposition, pq p010 convert to rgba + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1160, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_p010_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/pq_p010_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1170 + * @tc.name : test image decomposition, pq rgba convert to nv12 + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1170, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* 
gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/pq_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_rgba_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/pq_rgba_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1180 + * @tc.name : test image decomposition, pq rgba convert to nv12 + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1180, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/pq_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_rgba_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/pq_rgba_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1190 + * @tc.name : test image decomposition, pq rgba convert to rgba + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1190, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/pq_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, 
IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_rgba_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/pq_rgba_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1200 + * @tc.name : test image decomposition, pq rgba convert to rgba + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1200, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/pq_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/pq_rgba_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/pq_rgba_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1210 + * @tc.name : test image decomposition, hlg nv12 convert to nv12 + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1210, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/hlg_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_nv12_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/hlg_nv12_out_rgba.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} 
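The expected dump sizes in these decomposition and composition tests come from the NV12_SIZE and RGBA_SIZE helpers, which are defined earlier in func_test.cpp and are not part of this hunk. A minimal sketch of what they are assumed to expand to, for orientation only; the actual macros may differ:
// Assumed definitions -- not taken from this patch; shown only to make the size math readable.
// NV12 (8-bit YCbCr 4:2:0): a full-size luma plane plus a half-size interleaved chroma plane.
#define NV12_SIZE(w, h) ((w) * (h) * 3 / 2)
// RGBA_8888: four bytes per pixel.
#define RGBA_SIZE(w, h) ((w) * (h) * 4)
// With the 3840x2160 frames used in these tests:
//   NV12_SIZE(3840, 2160)           -> 12441600 bytes (full-resolution dst)
//   RGBA_SIZE(3840 >> 1, 2160 >> 1) ->  8294400 bytes (half-resolution gain map)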
+ +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1220 + * @tc.name : test image decomposition, hlg nv12 convert to nv12 + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1220, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/hlg_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_nv12_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/hlg_nv12_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1230 + * @tc.name : test image decomposition, hlg nv12 convert to rgba + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1230, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/hlg_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_nv12_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/hlg_nv12_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1240 + * @tc.name : test image decomposition, hlg nv12 convert to rgba + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1240, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + 
OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/hlg_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_nv12_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/hlg_nv12_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1250 + * @tc.name : test image decomposition, hlg nv21 convert to nv12 + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1250, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/hlg_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_nv21_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/hlg_nv21_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1260 + * @tc.name : test image decomposition, hlg nv21 convert to nv12 + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1260, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/hlg_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + 
EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_nv21_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/hlg_nv21_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1270 + * @tc.name : test image decomposition, hlg nv21 convert to rgba + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1270, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/hlg_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_nv21_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/hlg_nv21_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1280 + * @tc.name : test image decomposition, hlg nv21 convert to rgba + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1280, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/hlg_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_nv21_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/hlg_nv21_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, 
IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1290 + * @tc.name : test image decomposition, hlg rgba convert to nv12 + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1290, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/hlg_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_rgba_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/hlg_rgba_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1300 + * @tc.name : test image decomposition, hlg rgba convert to nv12 + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1300, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/hlg_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_rgba_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/hlg_rgba_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1310 + * @tc.name : test image decomposition, hlg rgba convert to rgba + gainmap nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1310, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + 
OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/hlg_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_rgba_out_gainmap_nv12.yuv"; + std::string outFile = "/data/test/media/hlg_rgba_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = NV12_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1320 + * @tc.name : test image decomposition, hlg rgba convert to rgba + gainmap rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1320, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* dst = nullptr; + OH_PixelmapNative* gainMap = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + CreatePixelmap(&gainMap, DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/hlg_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Decompose(imageProcessor, src, dst, gainMap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + + std::string outGainmapFile = "/data/test/media/hlg_rgba_out_gainmap_rgba.yuv"; + std::string outFile = "/data/test/media/hlg_rgba_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + int32_t outGainmapSize = RGBA_SIZE(DEFAULT_WIDTH >> 1, DEFAULT_HEIGHT >> 1); + ReadPixels(outFile, dst, outSize); + ReadPixels(outGainmapFile, gainMap, outGainmapSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1330 + * @tc.name : test image composition, pq p010 convert to 8bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1330, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = 
"/data/test/media/pq_p010_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1340 + * @tc.name : test image composition, pq p010 convert to 8bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1340, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/pq_p010_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1350 + * @tc.name : test image composition, pq p010 convert to 8bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1350, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/pq_p010_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1360 + * @tc.name : test image composition, pq p010 convert to 8bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1360, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/pq_p010.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, 
IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/pq_p010_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1370 + * @tc.name : test image composition, pq rgba convert to 8bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1370, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/pq_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/pq_rgba_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1380 + * @tc.name : test image composition, pq rgba convert to 8bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1380, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/pq_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/pq_rgba_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1390 + * @tc.name : test image composition, hlg nv12 convert to 8bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1390, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/hlg_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = 
OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/hlg_nv12_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1400 + * @tc.name : test image composition, hlg nv12 convert to 8bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1400, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/hlg_nv12.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCBCR_P010); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/hlg_nv12_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1410 + * @tc.name : test image composition, hlg nv21 convert to 8bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1410, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/hlg_nv21.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/hlg_nv21_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1420 + * @tc.name : test image composition, hlg nv21 convert to 8bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1420, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/hlg_nv21.yuv"; + WritePixels(inFile, src, 
DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_YCRCB_P010); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/hlg_nv21_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1430 + * @tc.name : test image composition, hlg rgba convert to 8bit nv12 + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1430, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_NV12); + std::string inFile = "/data/test/media/hlg_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/hlg_rgba_out_nv12.yuv"; + int32_t outSize = NV12_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_FUNC_TEST_1440 + * @tc.name : test image composition, hlg rgba convert to 8bit rgba + * @tc.desc : function test + */ +HWTEST_F(VpeImageFuncTest, VPE_IMAGE_FUNC_TEST_1440, TestSize.Level0) +{ + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative* src = nullptr; + OH_PixelmapNative* gainMap = nullptr; + OH_PixelmapNative* dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888); + std::string inFile = "/data/test/media/hlg_rgba.yuv"; + WritePixels(inFile, src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102); + ret = OH_ImageProcessing_Compose(imageProcessor, src, gainMap, dst); + + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + std::string outFile = "/data/test/media/hlg_rgba_out_rgba.yuv"; + int32_t outSize = RGBA_SIZE(DEFAULT_WIDTH, DEFAULT_HEIGHT); + ReadPixels(outFile, dst, outSize); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} +} \ No newline at end of file diff --git a/test/ndk/moduletest/image/reli_test.cpp b/test/ndk/moduletest/image/reli_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ff7327071fd2377fc45ebde7363d571321a13cc0 --- /dev/null +++ b/test/ndk/moduletest/image/reli_test.cpp @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include "gtest/gtest.h" +#include "image_processing.h" +#include "image/pixelmap_native.h" +using namespace std; +using namespace testing::ext; + +namespace { +constexpr int32_t DEFAULT_WIDTH = 3840; +constexpr int32_t DEFAULT_HEIGHT = 2160; + +static void CreatePixelmap(OH_PixelmapNative **pixelMap, int32_t width, int32_t height, int format) +{ + OH_Pixelmap_InitializationOptions *options = nullptr; + (void)OH_PixelmapInitializationOptions_Create(&options); + (void)OH_PixelmapInitializationOptions_SetWidth(options, width); + (void)OH_PixelmapInitializationOptions_SetHeight(options, height); + (void)OH_PixelmapInitializationOptions_SetPixelFormat(options, format); + (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelMap); +} + +class VpeImageReliTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; + +void VpeImageReliTest::SetUpTestCase() +{ + OH_ImageProcessing_InitializeEnvironment(); +} +void VpeImageReliTest::TearDownTestCase() +{ + OH_ImageProcessing_DeinitializeEnvironment(); +} +void VpeImageReliTest::SetUp() {} +void VpeImageReliTest::TearDown() {} +} + + +namespace { +int32_t TestUnsupportedOutput(int32_t inColorSpace, int32_t inPixFmt) +{ + for (int i = 0; i <= PIXEL_FORMAT_YCRCB_P010; i++) { + for (int j = 0; j <= 1; j++) { + OH_ImageProcessing* imageProcessor = nullptr; + int32_t ret = OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_PixelmapNative *src = nullptr; + OH_PixelmapNative *dst = nullptr; + CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, inColorSpace); + CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, i); + ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + ret = OH_ImageProcessing_Destroy(imageProcessor); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + } + } + return 0; +} + +/** + * @tc.number : VPE_IMAGE_RELI_TEST_0010 + * @tc.name : test all unsupported conversion + * @tc.desc : function test + */ +HWTEST_F(VpeImageReliTest, VPE_IMAGE_RELI_TEST_0010, TestSize.Level0) +{ + for (int i = 0; i < PIXEL_FORMAT_YCRCB_P010; i++){ + for (int j = 0; j < 1; j++) { + TestUnsupportedOutput(i, j); + } + } +} +} \ No newline at end of file diff --git a/test/ndk/moduletest/resources/ohos_test.xml b/test/ndk/moduletest/resources/ohos_test.xml new file mode 100644 index 0000000000000000000000000000000000000000..67d74c24ca4fdcbd7b2de2a082d32743cd3710f9 --- /dev/null +++ b/test/ndk/moduletest/resources/ohos_test.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + diff --git a/test/ndk/moduletest/video/BUILD.gn b/test/ndk/moduletest/video/BUILD.gn index 0ecdc806af717a6f127af1c2c64c22be74f423b5..1085e4b117005fdf79ea2964ea0e2f421642fe42 100644 --- a/test/ndk/moduletest/video/BUILD.gn +++ 
b/test/ndk/moduletest/video/BUILD.gn @@ -12,16 +12,16 @@ # limitations under the License. import("//build/test.gni") -import("//foundation/multimedia/media_foundation/config.gni") -import("//foundation/multimedia/media_foundation/video_processing_engine/config.gni") +import("//build/ohos.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +module_output_path = MODULE_TEST_OUTPUT_PATH video_moduletest_native_include_dirs = [ - "$vpe_interface_capi_dir", - "$vpe_capi_root_dir/test/moduletest/common", - "$vpe_capi_root_dir/../../../graphic/graphic_2d/interfaces/inner_api", - "$vpe_capi_root_dir/../../../window/window_manager/interfaces/innerkits", - "$vpe_capi_root_dir/../../av_codec/interfaces/kits/c", - "$vpe_capi_root_dir/../interface/kits/c", + "$CAPI_DIR", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/moduletest/common", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../graphic/graphic_2d/interfaces/inner_api", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../window/window_manager/interfaces/innerkits" ] video_moduletest_cflags = [ @@ -48,7 +48,7 @@ video_moduletest_cflags = [ ################################################################################################################## ohos_unittest("vpe_video_native_module_test") { - module_out_path = "media_foundation/moduletest" + module_out_path = module_output_path include_dirs = video_moduletest_native_include_dirs include_dirs += [ "./" ] cflags = video_moduletest_cflags @@ -57,15 +57,12 @@ ohos_unittest("vpe_video_native_module_test") { "api_test.cpp", "capability_test.cpp", "func_test.cpp", - "reli_test.cpp", "video_sample.cpp", + "../common/yuv_viewer.cpp", ] deps = [ - "$vpe_capi_root_dir/framework:video_processing", - "$vpe_capi_root_dir/../../av_codec/interfaces/inner_api/native:av_codec_client", - "$vpe_capi_root_dir/../../av_codec/interfaces/kits/c:capi_packages", - "$vpe_capi_root_dir/../../av_codec/services/services:av_codec_service", + "$FRAMEWORK_DIR:video_processing" ] external_deps = [ @@ -79,4 +76,7 @@ ohos_unittest("vpe_video_native_module_test") { "media_foundation:native_media_core", "window_manager:libwm", ] + + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/ndk/moduletest/resources/ohos_test.xml" } diff --git a/test/ndk/moduletest/video/api_test.cpp b/test/ndk/moduletest/video/api_test.cpp index 4e8ed3c040e9a1d629bf5c9ea7ec136336f9b5f9..3705a266ac8fe35985695dabd0d1978f7b4ddca1 100644 --- a/test/ndk/moduletest/video/api_test.cpp +++ b/test/ndk/moduletest/video/api_test.cpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024 Huawei Device Co., Ltd. + * Copyright (C) 2024 Huawei Device Co., Ltd. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at @@ -61,26 +61,26 @@ TestConsumerListener::~TestConsumerListener() {} void TestConsumerListener::OnBufferAvailable() {} -const VideoProcessing_ColorSpaceInfo SRC_INFO = {OH_VIDEO_HDR_VIVID, - OH_COLORSPACE_BT2020_HLG_LIMIT, - NATIVEBUFFER_PIXEL_FMT_YCBCR_P010}; -const VideoProcessing_ColorSpaceInfo DST_INFO = {OH_VIDEO_HDR_VIVID, +const VideoProcessing_ColorSpaceInfo SRC_INFO = {OH_VIDEO_HDR_HDR10, + OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP}; +const VideoProcessing_ColorSpaceInfo DST_INFO = {OH_VIDEO_HDR_HLG, OH_COLORSPACE_BT2020_PQ_LIMIT, - NATIVEBUFFER_PIXEL_FMT_YCBCR_P010}; + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP}; } namespace { static int32_t g_userValue = 1; -static int32_t g_index = 1; +static int32_t g_Index = 1; -static void onErrorEmptyCallback(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, +static void onErrorEmptyCallback(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) { //do nothing } -static void onErrorCallback(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, +static void onErrorCallback(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) { cout << "onErrorCallback" << endl; @@ -119,7 +119,11 @@ static void OnNewOutputBufferCallback(OH_VideoProcessing* videoProcessor, uint32 HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0010, TestSize.Level0) { VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessing_DeinitializeEnvironment(); } @@ -131,7 +135,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0010, TestSize.Level0) HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0020, TestSize.Level0) { VideoProcessing_ErrorCode ret = OH_VideoProcessing_DeinitializeEnvironment(); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + } } /** @@ -142,9 +150,13 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0020, TestSize.Level0) HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0030, TestSize.Level0) { VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessing_DeinitializeEnvironment(); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } } /** @@ -156,7 +168,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0040, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(nullptr, nullptr); - ASSERT_FALSE(ret); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } OH_VideoProcessing_DeinitializeEnvironment(); } @@ -169,7 +183,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0050, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = 
OH_VideoProcessing_IsColorSpaceConversionSupported(&SRC_INFO, nullptr); - ASSERT_FALSE(ret); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } } /** @@ -181,7 +197,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0060, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(nullptr, &DST_INFO); - ASSERT_FALSE(ret); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } } /** @@ -193,9 +211,7 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0070, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(&SRC_INFO, &DST_INFO); - if (!access("/system/lib64/", 0)) { - ASSERT_TRUE(ret); - } else { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_FALSE(ret); } } @@ -209,7 +225,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0080, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsMetadataGenerationSupported(nullptr); - ASSERT_FALSE(ret); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } } /** @@ -221,7 +239,7 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0090, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsMetadataGenerationSupported(&SRC_INFO); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_TRUE(ret); } else { ASSERT_FALSE(ret); @@ -238,7 +256,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0100, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); OH_VideoProcessing** videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(videoProcessor, INT_MAX); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } } /** @@ -250,9 +272,13 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0110, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); OH_VideoProcessing** videoProcessor = nullptr; - VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(videoProcessor, + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } } /** @@ -265,7 +291,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0120, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, INT_MAX); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } OH_VideoProcessing_Destroy(videoProcessor); } @@ -280,12 +310,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0130, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = 
OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OH_VideoProcessing_Destroy(videoProcessor); } else { ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -297,7 +327,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0140, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_Destroy(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -311,12 +343,10 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0150, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Destroy(videoProcessor); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -329,7 +359,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0160, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_RegisterCallback(nullptr, nullptr, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } } /** @@ -342,10 +376,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0170, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessing_RegisterCallback(nullptr, callback, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); - OH_VideoProcessingCallback_Destroy(callback); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_RegisterCallback(nullptr, callback, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + OH_VideoProcessingCallback_Destroy(callback); + } } /** @@ -359,13 +395,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0180, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_RegisterCallback(videoProcessor, nullptr, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -380,19 +414,15 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0190, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = 
OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); VideoProcessing_Callback* callback = nullptr; ret = OH_VideoProcessingCallback_Create(&callback); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnError(callback, onErrorEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, nullptr); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); OH_VideoProcessing_Destroy(videoProcessor); OH_VideoProcessingCallback_Destroy(callback); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -407,17 +437,15 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0200, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); VideoProcessing_Callback* callback = nullptr; ret = OH_VideoProcessingCallback_Create(&callback); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); OH_VideoProcessing_Destroy(videoProcessor); OH_VideoProcessingCallback_Destroy(callback); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -432,20 +460,19 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0210, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); VideoProcessing_Callback* callback = nullptr; ret = OH_VideoProcessingCallback_Create(&callback); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); void* userData = &g_userValue; - if (!userData) { - ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, userData); + if (!userData) + { + ret = OH_VideoProcessing_RegisterCallback( videoProcessor, callback, userData); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); } OH_VideoProcessing_Destroy(videoProcessor); OH_VideoProcessingCallback_Destroy(callback); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -458,7 +485,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0220, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_SetSurface(nullptr, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -478,7 +509,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0230, TestSize.Level0) OHNativeWindow *window = nullptr; window = CreateNativeWindowFromSurface(&ps); VideoProcessing_ErrorCode ret = OH_VideoProcessing_SetSurface(nullptr, window); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); 
+ } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } OH_NativeWindow_DestroyNativeWindow(window); } @@ -493,14 +528,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0240, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_SetSurface(videoProcessor, nullptr); ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); - OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -514,23 +547,23 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0250, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - sptr cs = Surface::CreateSurfaceAsConsumer(); - sptr listener = new TestConsumerListener( - cs, "/data/test/media/out_320_240_10s.rgba"); - cs->RegisterConsumerListener(listener); - auto p = cs->GetProducer(); - sptr ps = Surface::CreateSurfaceAsProducer(p); - OHNativeWindow *window = nullptr; - window = CreateNativeWindowFromSurface(&ps); - ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OH_VideoProcessing_Destroy(videoProcessor); - OH_NativeWindow_DestroyNativeWindow(window); } else { ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); } /** @@ -542,7 +575,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0260, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_GetSurface(nullptr, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -562,7 +599,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0270, TestSize.Level0) OHNativeWindow *window = nullptr; window = CreateNativeWindowFromSurface(&ps); VideoProcessing_ErrorCode ret = OH_VideoProcessing_GetSurface(nullptr, &window); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } OH_NativeWindow_DestroyNativeWindow(window); } @@ -577,14 +618,14 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0280, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; 
VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessing_GetSurface(videoProcessor, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); - OH_VideoProcessing_Destroy(videoProcessor); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_GetSurface(videoProcessor, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -598,23 +639,23 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0290, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - sptr cs = Surface::CreateSurfaceAsConsumer(); - sptr listener = new TestConsumerListener( - cs, "/data/test/media/out_320_240_10s.rgba"); - cs->RegisterConsumerListener(listener); - auto p = cs->GetProducer(); - sptr ps = Surface::CreateSurfaceAsProducer(p); - OHNativeWindow *window = nullptr; - window = CreateNativeWindowFromSurface(&ps); - ret = OH_VideoProcessing_GetSurface(videoProcessor, &window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &window); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OH_VideoProcessing_Destroy(videoProcessor); - OH_NativeWindow_DestroyNativeWindow(window); } else { ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); } /** @@ -626,7 +667,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0300, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_Start(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -640,13 +685,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0310, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Start(videoProcessor); ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -661,25 +704,23 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0320, TestSize.Level0) OH_VideoProcessing* videoProcessor = 
nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - sptr cs = Surface::CreateSurfaceAsConsumer(); - sptr listener = new TestConsumerListener( - cs, "/data/test/media/out_320_240_10s.rgba"); - cs->RegisterConsumerListener(listener); - auto p = cs->GetProducer(); - sptr ps = Surface::CreateSurfaceAsProducer(p); - OHNativeWindow *window = nullptr; - window = CreateNativeWindowFromSurface(&ps); - ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Start(videoProcessor); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); - OH_VideoProcessing_Destroy(videoProcessor); - OH_NativeWindow_DestroyNativeWindow(window); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); } /** @@ -691,7 +732,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0330, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_Stop(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -705,14 +750,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0340, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Stop(videoProcessor); ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); - OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -726,30 +769,25 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0350, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - sptr cs = Surface::CreateSurfaceAsConsumer(); - sptr listener = new TestConsumerListener( - cs, "/data/test/media/out_320_240_10s.rgba"); - cs->RegisterConsumerListener(listener); - auto p = cs->GetProducer(); - sptr ps = Surface::CreateSurfaceAsProducer(p); - OHNativeWindow *window = nullptr; - window = CreateNativeWindowFromSurface(&ps); - ret = OH_VideoProcessing_SetSurface(videoProcessor, window); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OHNativeWindow *outwindow 
= nullptr; - ret = OH_VideoProcessing_GetSurface(videoProcessor, &outwindow); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Start(videoProcessor); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Stop(videoProcessor); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); - OH_VideoProcessing_Destroy(videoProcessor); - OH_NativeWindow_DestroyNativeWindow(window); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); } /** @@ -760,8 +798,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0350, TestSize.Level0) HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0360, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); - VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(nullptr, g_index); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(nullptr, g_Index); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -775,14 +817,14 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0370, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, INT_MAX); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); - OH_VideoProcessing_Destroy(videoProcessor); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, INT_MAX); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -794,7 +836,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0380, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -807,7 +853,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0390, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - 
ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -820,7 +870,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0400, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Destroy(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -833,9 +887,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0410, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_Destroy(callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_Destroy(callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } } /** @@ -848,7 +904,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0420, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnError( nullptr, onErrorEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -861,10 +921,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0430, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnError( - callback, onErrorEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError( + callback, onErrorEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -878,7 +940,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0440, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnError( nullptr, onErrorCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -891,9 +957,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0450, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnError(callback, onErrorCallback); - ASSERT_EQ(ret, 
VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError(callback, onErrorCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -907,7 +975,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0460, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnState( nullptr, onStateEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -920,9 +992,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0470, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnState(callback, onStateEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, onStateEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -935,7 +1009,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0480, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnState(nullptr, onStateCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -948,9 +1026,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0490, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnState(callback, onStateCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, onStateCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -964,7 +1044,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0500, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer( nullptr, OnNewOutputBufferEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -977,9 +1061,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0510, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = 
OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -993,7 +1079,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0520, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer( nullptr, OnNewOutputBufferCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -1006,9 +1096,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0530, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -1023,11 +1115,13 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0540, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (OH_VideoProcessing_IsColorSpaceConversionSupported(&SRC_INFO, &DST_INFO)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } } } } \ No newline at end of file diff --git a/test/ndk/moduletest/video/capability_test.cpp b/test/ndk/moduletest/video/capability_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..46f481ab20f88d40455eca8648872fbaad06aa2f --- /dev/null +++ b/test/ndk/moduletest/video/capability_test.cpp @@ -0,0 +1,897 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include "gtest/gtest.h" +#include "video_processing.h" +#include "native_buffer.h" + +using namespace std; +using namespace testing::ext; + +namespace { +class VpeVideoCapTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; + +void VpeVideoCapTest::SetUpTestCase() +{ + OH_VideoProcessing_InitializeEnvironment(); +} +void VpeVideoCapTest::TearDownTestCase() +{ + OH_VideoProcessing_DeinitializeEnvironment(); +} +void VpeVideoCapTest::SetUp() {} +void VpeVideoCapTest::TearDown() {} +} + + + +namespace { +/** + * @tc.number : COLORSPACE_SUPPORT_001 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_001, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_002 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_002, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_003 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_003, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + 
ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_004 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_004, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_RGB_565; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_005 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_005, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_006 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_006, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_007 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_007, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; 
+ if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_008 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_008, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_009 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_009, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0010 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0010, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0011 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0011, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = 
OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HDR10; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0012 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0012, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0013 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0013, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0014 + * @tc.name : HDR vivid(HLG) to HDR vivid(PQ) + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0014, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0015 + * @tc.name : 
HDR vivid(HLG) to HDR vivid(PQ) + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0015, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_FULL; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0016 + * @tc.name : HDR vivid(HLG) to HDR vivid(PQ) + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0016, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0017 + * @tc.name : HDR vivid(HLG) to HDR vivid(PQ) + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0017, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0018 + * @tc.name : HLG to HDR10/HDR vivid + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0018, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HLG; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HDR10; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if 
(!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0019 + * @tc.name : HLG to HDR10/HDR vivid + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0019, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HLG; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0020 + * @tc.name : HLG to HDR10/HDR vivid + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0020, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HLG; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0021 + * @tc.name : HLG to HDR10/HDR vivid + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0021, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HLG; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HDR10; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0022 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0022, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = 
NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0023 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0023, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0024 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0024, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0025 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0025, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HDR10; + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0026 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0026, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + 
inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT;
+    inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+
+    outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT;
+    outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
+        if (!access("/system/lib64/", 0)) {
+            ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        } else {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        }
+    }
+}
+/**
+ * @tc.number : COLORSPACE_SUPPORT_0027
+ * @tc.name : HDR2SDR
+ * @tc.desc : api test
+ */
+HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0027, TestSize.Level2)
+{
+    VideoProcessing_ColorSpaceInfo inputFormat;
+    VideoProcessing_ColorSpaceInfo outputFormat;
+    inputFormat.metadataType = OH_VIDEO_HDR_VIVID;
+    inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT;
+    inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+
+    outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT;
+    outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P;
+    if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
+        if (!access("/system/lib64/", 0)) {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        } else {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        }
+    }
+}
+/**
+ * @tc.number : COLORSPACE_SUPPORT_0028
+ * @tc.name : HDR2SDR
+ * @tc.desc : api test
+ */
+HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0028, TestSize.Level2)
+{
+    VideoProcessing_ColorSpaceInfo inputFormat;
+    VideoProcessing_ColorSpaceInfo outputFormat;
+    inputFormat.metadataType = OH_VIDEO_HDR_VIVID;
+    inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT;
+    inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+
+    outputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT;
+    outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
+        if (!access("/system/lib64/", 0)) {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        } else {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        }
+    }
+}
+/**
+ * @tc.number : COLORSPACE_SUPPORT_0029
+ * @tc.name : HDR2SDR
+ * @tc.desc : api test
+ */
+HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0029, TestSize.Level2)
+{
+    VideoProcessing_ColorSpaceInfo inputFormat;
+    VideoProcessing_ColorSpaceInfo outputFormat;
+    inputFormat.metadataType = OH_VIDEO_HDR_VIVID;
+    inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT;
+    inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    outputFormat.metadataType = OH_VIDEO_HDR_HDR10;
+    outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT;
+    outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
+        if (!access("/system/lib64/", 0)) {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        } else {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        }
+    }
+}
+/**
+ * @tc.number : COLORSPACE_SUPPORT_0030
+ * @tc.name : SDR2SDR
+ * @tc.desc : api test
+ */
+HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0030, TestSize.Level2)
+{
+    VideoProcessing_ColorSpaceInfo inputFormat;
+
VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0031 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0031, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT601_SMPTE_C_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0032 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0032, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_SMPTE_C_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0033 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0033, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_SMPTE_C_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0034 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0034, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_FULL; + inputFormat.pixelFormat = 
NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+
+    outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT;
+    outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
+        if (!access("/system/lib64/", 0)) {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        } else {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        }
+    }
+}
+/**
+ * @tc.number : COLORSPACE_SUPPORT_0035
+ * @tc.name : SDR2SDR
+ * @tc.desc : api test
+ */
+HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0035, TestSize.Level2)
+{
+    VideoProcessing_ColorSpaceInfo inputFormat;
+    VideoProcessing_ColorSpaceInfo outputFormat;
+    inputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT;
+    inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    outputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT;
+    outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
+        if (!access("/system/lib64/", 0)) {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        } else {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        }
+    }
+}
+/**
+ * @tc.number : COLORSPACE_SUPPORT_0036
+ * @tc.name : SDR2SDR
+ * @tc.desc : api test
+ */
+HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0036, TestSize.Level2)
+{
+    VideoProcessing_ColorSpaceInfo inputFormat;
+    VideoProcessing_ColorSpaceInfo outputFormat;
+    inputFormat.colorSpace = OH_COLORSPACE_BT601_SMPTE_C_FULL;
+    inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT;
+    outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
+        if (!access("/system/lib64/", 0)) {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        } else {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        }
+    }
+}
+/**
+ * @tc.number : COLORSPACE_SUPPORT_0037
+ * @tc.name : invalid colorspace combination
+ * @tc.desc : api test
+ */
+HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0037, TestSize.Level2)
+{
+    VideoProcessing_ColorSpaceInfo inputFormat;
+    VideoProcessing_ColorSpaceInfo outputFormat;
+    inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT;
+    inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT;
+    outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;
+    if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
+        if (!access("/system/lib64/", 0)) {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        } else {
+            ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat));
+        }
+    }
+}
+} // namespace
\ No newline at end of file
diff --git a/test/ndk/moduletest/video/enum_list.h b/test/ndk/moduletest/video/enum_list.h
new file mode 100644
index 0000000000000000000000000000000000000000..e43165ef2224c60a865896adcfd46d164d82d094
--- /dev/null
+++ b/test/ndk/moduletest/video/enum_list.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef ENUM_LIST_H +#define ENUM_LIST_H +#include "native_buffer.h" + +int32_t NativeBuffer_ColorSpace[] = { + OH_COLORSPACE_NONE, + OH_COLORSPACE_BT601_EBU_FULL, + OH_COLORSPACE_BT601_SMPTE_C_FULL, + OH_COLORSPACE_BT709_FULL, + OH_COLORSPACE_BT2020_HLG_FULL, + OH_COLORSPACE_BT2020_PQ_FULL, + OH_COLORSPACE_BT601_EBU_LIMIT, + OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + OH_COLORSPACE_BT709_LIMIT, + OH_COLORSPACE_BT2020_HLG_LIMIT, + OH_COLORSPACE_BT2020_PQ_LIMIT, + OH_COLORSPACE_SRGB_FULL, + OH_COLORSPACE_P3_FULL, + OH_COLORSPACE_P3_HLG_FULL, + OH_COLORSPACE_P3_PQ_FULL, + OH_COLORSPACE_ADOBERGB_FULL, + OH_COLORSPACE_SRGB_LIMIT, + OH_COLORSPACE_P3_LIMIT, + OH_COLORSPACE_P3_HLG_LIMIT, + OH_COLORSPACE_P3_PQ_LIMIT, + OH_COLORSPACE_ADOBERGB_LIMIT, + OH_COLORSPACE_LINEAR_SRGB, + OH_COLORSPACE_LINEAR_BT709, + OH_COLORSPACE_LINEAR_P3, + OH_COLORSPACE_LINEAR_BT2020, + OH_COLORSPACE_DISPLAY_SRGB, + OH_COLORSPACE_DISPLAY_P3_SRGB, + OH_COLORSPACE_DISPLAY_P3_HLG, + OH_COLORSPACE_DISPLAY_P3_PQ, + OH_COLORSPACE_DISPLAY_BT2020_SRGB, + OH_COLORSPACE_DISPLAY_BT2020_HLG, + OH_COLORSPACE_DISPLAY_BT2020_PQ +}; + +int32_t NativeBuffer_Format[] = { + NATIVEBUFFER_PIXEL_FMT_CLUT8, + NATIVEBUFFER_PIXEL_FMT_CLUT1, + NATIVEBUFFER_PIXEL_FMT_CLUT4, + NATIVEBUFFER_PIXEL_FMT_RGB_565, + NATIVEBUFFER_PIXEL_FMT_RGBA_5658, + NATIVEBUFFER_PIXEL_FMT_RGBX_4444, + NATIVEBUFFER_PIXEL_FMT_RGBA_4444, + NATIVEBUFFER_PIXEL_FMT_RGB_444, + NATIVEBUFFER_PIXEL_FMT_RGBX_5551, + NATIVEBUFFER_PIXEL_FMT_RGBA_5551, + NATIVEBUFFER_PIXEL_FMT_RGB_555, + NATIVEBUFFER_PIXEL_FMT_RGBX_8888, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, + NATIVEBUFFER_PIXEL_FMT_RGB_888, + NATIVEBUFFER_PIXEL_FMT_BGR_565, + NATIVEBUFFER_PIXEL_FMT_BGRX_4444, + NATIVEBUFFER_PIXEL_FMT_BGRA_4444, + NATIVEBUFFER_PIXEL_FMT_BGRX_5551, + NATIVEBUFFER_PIXEL_FMT_BGRA_5551, + NATIVEBUFFER_PIXEL_FMT_BGRX_8888, + NATIVEBUFFER_PIXEL_FMT_BGRA_8888, + NATIVEBUFFER_PIXEL_FMT_YUV_422_I, + NATIVEBUFFER_PIXEL_FMT_YCBCR_422_SP, + NATIVEBUFFER_PIXEL_FMT_YCRCB_422_SP, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, + NATIVEBUFFER_PIXEL_FMT_YCBCR_422_P, + NATIVEBUFFER_PIXEL_FMT_YCRCB_422_P, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_P, + NATIVEBUFFER_PIXEL_FMT_YUYV_422_PKG, + NATIVEBUFFER_PIXEL_FMT_UYVY_422_PKG, + NATIVEBUFFER_PIXEL_FMT_YVYU_422_PKG, + NATIVEBUFFER_PIXEL_FMT_VYUY_422_PKG, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, + NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, + NATIVEBUFFER_PIXEL_FMT_RAW10, + NATIVEBUFFER_PIXEL_FMT_VENDER_MASK, + NATIVEBUFFER_PIXEL_FMT_BUTT +}; + +int32_t NativeBuffer_MetadataType[] = { + OH_VIDEO_HDR_HLG, + OH_VIDEO_HDR_HDR10, + OH_VIDEO_HDR_VIVID +}; + +#endif \ No newline at end of file diff --git a/test/ndk/moduletest/video/func_test.cpp b/test/ndk/moduletest/video/func_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c38a73f85da34fdfc70c2b807e0dc069561f2013 --- /dev/null +++ b/test/ndk/moduletest/video/func_test.cpp @@ -0,0 +1,1283 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include "gtest/gtest.h" +#include "video_processing.h" +#include "yuv_viewer.h" + +#include "video_sample.h" +using namespace std; +using namespace OHOS; +using namespace testing::ext; +namespace { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; + + +class VpeVideoFuncTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; + +void VpeVideoFuncTest::SetUpTestCase() +{ + OH_VideoProcessing_InitializeEnvironment(); +} +void VpeVideoFuncTest::TearDownTestCase() +{ + OH_VideoProcessing_DeinitializeEnvironment(); +} +void VpeVideoFuncTest::SetUp() +{ +} +void VpeVideoFuncTest::TearDown() +{ +} +} + +namespace { +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0010 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit NV12,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0010, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv12.yuv"; + sample->inputMetaPath = "/data/test/media/vivid_pq.bin"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0020 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit NV12,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0020, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv12.yuv"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0030 + * @tc.name : test HDRVivid2SDR ,src colorspace 
PQ@10bit NV21,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0030, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv21.yuv"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0040 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit NV21,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0040, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv21.yuv"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0050 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit RGBA,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0050, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_rgba.yuv"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0060 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit RGBA,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0060, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_pq_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + 
sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0070 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit NV12,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0070, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0080 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit NV12,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0080, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0090 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit NV21,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0090, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0100 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit NV21,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0100, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = 
sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0110 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit RGBA,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0110, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0120 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit RGBA,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0120, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0130 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV12,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0130, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0140 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV12,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0140, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, 
OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0150 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV12,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0150, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0160 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV21,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0160, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0170 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV21,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0170, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0180 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV21,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0180, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv21.yuv"; + VideoProcessParam param = 
{NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0190 + * @tc.name : test SDR2SDR ,src colorspace EBU@RGBA,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0190, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0200 + * @tc.name : test SDR2SDR ,src colorspace EBU@RGBA,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0200, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0210 + * @tc.name : test SDR2SDR ,src colorspace EBU@RGBA,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0210, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0220 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV12,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0220, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv12.yuv"; + VideoProcessParam param = 
{NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0230 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV12,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0230, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0240 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV12,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0240, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0250 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV21,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0250, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0260 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV21,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0260, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = 
"/data/test/media/smptec_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0270 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV21,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0270, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0280 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@RGBA,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0280, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0290 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@RGBA,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0290, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0300 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@RGBA,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0300, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = 
std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0310 + * @tc.name : test HDR2HDR ,src colorspace PQ@NV12,convert to HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0310, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0320 + * @tc.name : test HDR2HDR ,src colorspace PQ@NV12,convert to HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0320, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0330 + * @tc.name : test HDR2HDR ,src colorspace PQ@NV21,convert to HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0330, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0340 + * @tc.name : test HDR2HDR ,src colorspace PQ@NV21,convert to HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0340, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = 
std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0350 + * @tc.name : test HDR2HDR ,src colorspace PQ@RGBA,convert to HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0350, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0360 + * @tc.name : test HDR2HDR ,src colorspace PQ@RGBA,convert to HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0360, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0370 + * @tc.name : test HDR2HDR ,src colorspace HLG@NV12,convert to PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0370, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0380 + * @tc.name : test HDR2HDR ,src colorspace HLG@NV12,convert to PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0380, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample 
= std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0390 + * @tc.name : test HDR2HDR ,src colorspace HLG@NV21,convert to PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0390, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0400 + * @tc.name : test HDR2HDR ,src colorspace HLG@NV21,convert to PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0400, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0410 + * @tc.name : test HDR2HDR ,src colorspace HLG@RGBA,convert to PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0410, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0420 + * @tc.name : test HDR2HDR ,src colorspace HLG@RGBA,convert to PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0420, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr 
sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0430 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@NV12,convert to Vivid HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0430, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv12.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0440 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@NV12,convert to Vivid HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0440, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv12.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0450 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@NV21,convert to Vivid HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0450, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv21.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : 
VPE_VIDEO_FUNC_TEST_0460 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@NV21,convert to Vivid HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0460, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv21.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0470 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@RGBA,convert to Vivid HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0470, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_rgba.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0480 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@RGBA,convert to Vivid HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0480, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_rgba.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0490 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@NV12,convert to Vivid PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0490, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_nv12.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = 
sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0500 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@NV12,convert to Vivid PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0500, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_nv12.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0510 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@NV21,convert to Vivid PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0510, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_nv21.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0520 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@NV21,convert to Vivid PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0520, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_nv21.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0530 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@RGBA,convert to Vivid PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0530, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = 
std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_rgba.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0540 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@RGBA,convert to Vivid PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0540, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_rgba.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} +} \ No newline at end of file diff --git a/test/ndk/moduletest/video/reli_test.cpp b/test/ndk/moduletest/video/reli_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..818878e4d1138d8139ea3c9e83462f694e2961b3 --- /dev/null +++ b/test/ndk/moduletest/video/reli_test.cpp @@ -0,0 +1,248 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include <memory> +#include <unistd.h> +#include "gtest/gtest.h" +#include "video_processing.h" +#include "yuv_viewer.h" +#include "enum_list.h" +#include "video_sample.h" +using namespace std; +using namespace OHOS; +using namespace testing::ext; + +namespace { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; + + +class VpeVideoReliTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test cases + static void TearDownTestCase(void); + // SetUp: Called before each test case + void SetUp(void); + // TearDown: Called after each test case + void TearDown(void); +}; + +void VpeVideoReliTest::SetUpTestCase() +{ + OH_VideoProcessing_InitializeEnvironment(); +} +void VpeVideoReliTest::TearDownTestCase() +{ + OH_VideoProcessing_DeinitializeEnvironment(); +} +void VpeVideoReliTest::SetUp() +{ +} +void VpeVideoReliTest::TearDown() +{ +} +} + +namespace { +void TestUnsupportedOutput(int32_t inColorSpace, int32_t inPixFmt) +{ + for (int i : NativeBuffer_ColorSpace) { + for (int j : NativeBuffer_Format) { + for (int k : NativeBuffer_MetadataType) { + std::unique_ptr<VideoSample> sample = std::make_unique<VideoSample>(); + sample->inputFrameNumber = 1; + VideoProcessParam param = {inPixFmt, inColorSpace, j, i}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + EXPECT_NE(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } + } + } +} + +/** + * @tc.number : VPE_VIDEO_RELI_TEST_0010 + * @tc.name : test all unsupported convert options + * @tc.desc : function test + */ +HWTEST_F(VpeVideoReliTest, VPE_VIDEO_RELI_TEST_0010, TestSize.Level0) +{ + for (int i : NativeBuffer_ColorSpace) { + for (int j : NativeBuffer_Format) { + TestUnsupportedOutput(i, j); + } + } +} + +/** + * @tc.number : METADATASUPPORT_001 + * @tc.name : test all unsupported metadata generation + * @tc.desc : function test + */ +HWTEST_F(VpeVideoReliTest, METADATASUPPORT_001, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + for (int i : NativeBuffer_MetadataType) { + for (int j: NativeBuffer_ColorSpace) { + for (int k : NativeBuffer_Format) { + inputFormat.metadataType = i; + inputFormat.colorSpace = j; + inputFormat.pixelFormat = k; + bool ret = OH_VideoProcessing_IsMetadataGenerationSupported(&inputFormat); + } + } + } +} + + +void CheckCapability(ImageProcessing_ColorSpaceInfo formatImage) +{ + if(formatImage.colorSpace == OH_COLORSPACE_SRGB_FULL || + formatImage.colorSpace == OH_COLORSPACE_SRGB_LIMIT || + formatImage.colorSpace == OH_COLORSPACE_LINEAR_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_BT2020_SRGB) { + if(formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888){ + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } + } + } + } + if(formatImage.colorSpace ==
OH_COLORSPACE_DISPLAY_P3_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_HLG || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_PQ) { + if(formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888){ + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } + } + } + } + if(formatImage.colorSpace == OH_COLORSPACE_ADOBERGB_FULL || + formatImage.colorSpace == OH_COLORSPACE_ADOBERGB_LIMIT) { + if(formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888){ + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } + } + } + } +} + +HWTEST_F(VpeVideoReliTest, METADATASUPPORT_002, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo formatImage; + for (int i : NativeBuffer_MetadataType) { + for (int j: NativeBuffer_ColorSpace) { + for (int k : NativeBuffer_Format) { + formatImage.metadataType = i; + formatImage.colorSpace = j; + formatImage.pixelFormat = k; + CheckCapability(formatImage); + } + } + } +} + +HWTEST_F(VpeVideoReliTest, METADATASUPPORT_003, TestSize.Level2) +{ + ImageProcessing_ColorSpaceInfo formatImage; + for (int i : NativeBuffer_MetadataType) { + for (int j: NativeBuffer_ColorSpace) { + for (int k : NativeBuffer_Format) { + formatImage.metadataType = i; + formatImage.colorSpace = j; + formatImage.pixelFormat = k; + cout<<"--metadataType--" << i << "--colorSpace--" << j << "--pixelFormat--" << k << endl; + } + } + if (formatImage.colorSpace == OH_COLORSPACE_SRGB_FULL || + formatImage.colorSpace == OH_COLORSPACE_SRGB_LIMIT || + formatImage.colorSpace == OH_COLORSPACE_LINEAR_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_BT2020_SRGB) { + if(formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888){ + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/media/", 0)) { + cout<<"return true"<< endl; + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } else { + cout<<"return false"<< endl; + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } + } + } + } else if (formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_HLG || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_PQ) { + if(formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888){ + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/media/", 0)) { 
+ cout<<"return true"<< endl; + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } else { + cout<<"return false"<< endl; + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } + } + } + } else if (formatImage.colorSpace == OH_COLORSPACE_ADOBERGB_FULL || + formatImage.colorSpace == OH_COLORSPACE_ADOBERGB_LIMIT) { + if (formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888){ + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } else { + cout<<"return false"<< endl; + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } + } + } + } else { + cout<<"return false"<< endl; + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } + } +} +} \ No newline at end of file diff --git a/test/ndk/moduletest/video/video_sample.cpp b/test/ndk/moduletest/video/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..62ac5189b89b5ddc5247923ca686052acc462c6a --- /dev/null +++ b/test/ndk/moduletest/video/video_sample.cpp @@ -0,0 +1,257 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include <chrono> +#include <condition_variable> +#include <ctime> +#include <iostream> +#include <mutex> +#include <string_view> +#include <unistd.h> +#include "video_sample.h" +using namespace OHOS; +using namespace OHOS::Media; +using namespace std; +static uint32_t g_onErrorCount = 0; +static VideoProcessing_State g_state = VIDEO_PROCESSING_STATE_STOPPED; +static std::mutex g_Mutex; +static std::condition_variable g_Cond; +constexpr std::chrono::seconds STOP_TIMEOUT(10); +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +class TestConsumerListener : public IBufferConsumerListener { +public: + TestConsumerListener(sptr<Surface> cs, std::string_view name) : cs(cs) {}; + ~TestConsumerListener() {} + void OnBufferAvailable() override + { + sptr<SurfaceBuffer> buffer; + int32_t flushFence; + cs->AcquireBuffer(buffer, flushFence, timestamp, damage); + + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + Rect damage = {}; + sptr<Surface> cs {nullptr}; +}; + + + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast<int64_t>(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + g_onErrorCount++; + std::cout << "OnError callback recv errorcode:" << error << std::endl; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); + if (ret != VIDEO_PROCESSING_SUCCESS) { + g_onErrorCount++; + std::cout << "Render output buffer failed, errcode: "<< ret << std::endl; + } +} + +VideoSample::~VideoSample() +{ + if (inFile != nullptr) { + if (inFile->is_open()) { + inFile->close(); + } + inFile.reset(); + inFile = nullptr; + } + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + if (metaData) { + delete[] metaData; + metaData = nullptr; + } + + OH_VideoProcessing_Destroy(videoProcessor); +} + +int32_t VideoSample::InitVideoSample(const int32_t type, int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + param_ = param; + viewer = std::make_unique<YuvViewer>(); // YuvViewer type assumed from yuv_viewer.h + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, type); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + //outWindow = viewer->CreateWindow(width, height, param.outFmt, param.outColorSpace, isHDRVividOut); + + cs = Surface::CreateSurfaceAsConsumer(); + sptr<IBufferConsumerListener> listener = new TestConsumerListener(cs, OUT_DIR); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret ==
VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + + if (isHDRVivid) { + metaDataFile = std::make_unique<std::ifstream>(inputMetaPath); + metaDataFile->seekg(0, ios::end); + metadataSize = metaDataFile->tellg(); + metaDataFile->seekg(0, ios::beg); + metaData = new uint8_t[metadataSize]; + metaDataFile->read(reinterpret_cast<char *>(metaData), metadataSize); + } + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc() +{ + inFile = std::make_unique<std::ifstream>(inputFilePath); + for (int32_t i = 0; i < inputFrameNumber; i++) { + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + if (inFile->is_open()) { + if (param_.inFmt == NATIVEBUFFER_PIXEL_FMT_YCBCR_P010 || + param_.inFmt == NATIVEBUFFER_PIXEL_FMT_YCRCB_P010) { + ReadOneFrameP010(reinterpret_cast<uint8_t *>(virAddr), config); + } else if (param_.inFmt == NATIVEBUFFER_PIXEL_FMT_RGBA_1010102) { + ReadOneFrameRGBA10(reinterpret_cast<uint8_t *>(virAddr), config); + } + inFile->seekg(0, ios::beg); + } + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + if (isHDRVivid) { + uint8_t val = OH_VIDEO_HDR_VIVID; + err =
OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + CHECK_AND_RETURN_RET(err == 0, err, "set OH_HDR_METADATA_TYPE failed."); + err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_DYNAMIC_METADATA, metadataSize, metaData); + CHECK_AND_RETURN_RET(err == 0, err, "set OH_HDR_DYNAMIC_METADATA failed."); + } + } + return 0; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + inputLoop_ = make_unique<thread>(&VideoSample::InputFunc, this); + return VIDEO_PROCESSING_SUCCESS; +} + +int32_t VideoSample::WaitAndStopSample() +{ + inputLoop_->join(); + int32_t ret = OH_VideoProcessing_Stop(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Stop failed."); + unique_lock<mutex> lock(g_Mutex); + if (g_Cond.wait_for(lock, STOP_TIMEOUT) == std::cv_status::timeout) { + std::cout << "waiting stop state timeout" << std::endl; + } + return g_onErrorCount; +} + +int32_t VideoSample::ReadOneFrameP010(uint8_t *addr, OH_NativeBuffer_Config config) +{ + uint8_t *start = addr; + // copy Y plane: width_ 16-bit samples per row, advance by the buffer stride + for (uint32_t i = 0; i < config.height; i++) { + inFile->read(reinterpret_cast<char *>(addr), width_ * sizeof(uint16_t)); + addr += config.stride; + } + // copy UV plane: interleaved UV rows, half the Y height + for (uint32_t i = 0; i < config.height / sizeof(uint16_t); i++) { + inFile->read(reinterpret_cast<char *>(addr), width_ * sizeof(uint16_t)); + addr += config.stride; + } + return addr - start; +} + +int32_t VideoSample::ReadOneFrameRGBA10(uint8_t *addr, OH_NativeBuffer_Config config) +{ + uint8_t *start = addr; + for (uint32_t i = 0; i < height_; i++) { + inFile->read(reinterpret_cast<char *>(addr), width_ * sizeof(uint32_t)); + addr += config.stride; + } + return addr - start; +} diff --git a/test/ndk/moduletest/video/video_sample.h b/test/ndk/moduletest/video/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..1dacbc5e8dfc131dbd5e96017d9321cbc7b640cb --- /dev/null +++ b/test/ndk/moduletest/video/video_sample.h @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +#ifndef VIDEO_SAMPLE_H +#define VIDEO_SAMPLE_H +#include <fstream> +#include <iostream> +#include <memory> +#include <string> +#include <thread> +#include "yuv_viewer.h" +#include "video_processing.h" +#include "surface.h" +#include "surface/window.h" +#include "native_buffer.h" + +#define CHECK_AND_RETURN_RET(cond, ret, msg) \ + if (1) { \ + if (!(cond)) { \ + std::cout << msg << " ErrCode:" << ret << std::endl; \ + return ret; \ + } \ + } else void (0) + +typedef struct VideoProcessParam { + OH_NativeBuffer_Format inFmt; + OH_NativeBuffer_ColorSpace inColorSpace; + OH_NativeBuffer_Format outFmt; + OH_NativeBuffer_ColorSpace outColorSpace; +} VideoProcessParam; + +namespace OHOS { +class VideoSample { +public: + VideoSample() = default; + ~VideoSample(); + + int32_t inputFrameNumber = 100; + std::string inputFilePath = ""; + std::string inputMetaPath = ""; + bool isHDRVivid = false; + bool isHDRVividOut = false; + int32_t InputFunc(); + int32_t InitVideoSample(const int32_t type, int32_t width, int32_t height, VideoProcessParam param); + int32_t StartProcess(); + int32_t WaitAndStopSample(); + int32_t errCount = 0; + std::string OUT_DIR = ""; +private: + void SetInputWindowParam(); + + int32_t width_ = 0; + int32_t height_ = 0; + VideoProcessParam param_; + int32_t ReadOneFrameP010(uint8_t *addr, OH_NativeBuffer_Config config); + int32_t ReadOneFrameRGBA10(uint8_t *addr, OH_NativeBuffer_Config config); + OH_VideoProcessing* videoProcessor = nullptr; + std::unique_ptr<YuvViewer> viewer; // YuvViewer type assumed from yuv_viewer.h + sptr<Surface> cs = nullptr; // consumer/producer surfaces backing outWindow, used in InitVideoSample + sptr<Surface> ps = nullptr; + OHNativeWindow *inWindow = nullptr; + OHNativeWindow *outWindow = nullptr; + std::unique_ptr<std::ifstream> inFile; + std::unique_ptr<std::ifstream> metaDataFile; + std::unique_ptr<std::thread> inputLoop_; + VideoProcessing_Callback* callback = nullptr; + struct Region region; + struct Region::Rect *rect = nullptr; + uint8_t *metaData = nullptr; + int32_t metadataSize = 0; +}; +} + + + +#endif \ No newline at end of file