diff --git a/test/fuzztest/iamgecompose_fuzzer/BUILD.gn b/test/fuzztest/iamgecompose_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..fe1cf433d66a4fbe9d49d15b516d9877aebaf947 --- /dev/null +++ b/test/fuzztest/iamgecompose_fuzzer/BUILD.gn @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("ImagecomposeFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/iamgecompose_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ + "./" + ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "iamgecompose_fuzzer.cpp" + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.cpp b/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..25d8d7ef4d502154e8585dfbee21ac46ecb0d9c4 --- /dev/null +++ b/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.cpp @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "iamgecompose_fuzzer.h"
+#include "image_processing.h"
+#include "image/pixelmap_native.h"
+#include "native_color_space_manager.h"
+#include
+#include
+
+constexpr uint32_t DEFAULT_WIDTH = 3840;
+constexpr uint32_t DEFAULT_HEIGHT = 2160;
+
+static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format,
+    OH_NativeColorSpaceManager *colorSpaceNative)
+{
+    OH_Pixelmap_InintializationOptions *options = nullptr;
+    (void)OH_Pixelmap_InintializationOptions_Create(&options);
+    (void)OH_Pixelmap_InintializationOptions_SetWidth(options, width);
+    (void)OH_Pixelmap_InintializationOptions_SetHeight(options, height);
+    (void)OH_Pixelmap_InintializationOptions_SetPixelFormat(options, format);
+    (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap);
+    (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative);
+}
+
+bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size)
+{
+    OH_ImageProcessing* imageProcessor = nullptr;
+    OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COMPOSITION);
+
+    OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(SRGB);
+    OH_NativeColorSpaceManager *dstColorSpace = OH_NativeColorSpaceManager_CreateForName(BT2020_PQ);
+    OH_PixelmapNative *srcPic = nullptr;
+    OH_PixelmapNative *srcGainmap = nullptr;
+    OH_PixelmapNative *dst = nullptr;
+    CreatePixelmap(&srcPic, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace);
+    CreatePixelmap(&srcGainmap, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, srcColorSpace);
+    CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102, dstColorSpace);
+    OH_PixelmapNative_WritePixels(srcPic, const_cast<uint8_t *>(data), size);
+    OH_PixelmapNative_WritePixels(srcGainmap, const_cast<uint8_t *>(data), size);
+    bool ret = OH_ImageProcessing_Compose(imageProcessor, srcPic, srcGainmap, dst);
+    OH_PixelmapNative_Release(srcPic);
+    OH_PixelmapNative_Release(srcGainmap);
+    OH_PixelmapNative_Release(dst);
+    OH_NativeColorSpaceManager_Destroy(srcColorSpace);
+    OH_NativeColorSpaceManager_Destroy(dstColorSpace);
+    OH_ImageProcessing_Destroy(imageProcessor);
+    return ret;
+}
+
+/* Fuzzer entry point */
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size)
+{
+    /* Run your code on data */
+    DoSomethingInterestingWithMyAPI(data, size);
+    return 0;
+}
\ No newline at end of file
diff --git a/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.h b/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.h
new file mode 100644
index 0000000000000000000000000000000000000000..21a169f678694cb351b582422684b5e102f25dd5
--- /dev/null
+++ b/test/fuzztest/iamgecompose_fuzzer/iamgecompose_fuzzer.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2025 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef IMAGE_COMPOSE_FUZZER_HEADER +#define IMAGE_COMPOSE_FUZZER_HEADER + +#include +#include +#include +#include +#include +#include + +#endif \ No newline at end of file diff --git a/test/fuzztest/iamgecompose_fuzzer/project.xml b/test/fuzztest/iamgecompose_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/iamgecompose_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/BUILD.gn b/test/fuzztest/iamgesdr2hdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..2604010322da85f0b0843eaa3b80617ccda17eb0 --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/BUILD.gn @@ -0,0 +1,48 @@ +# Copyright (c) 2025 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#####################hydra-fuzz################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +##############################fuzztest########################################## +ohos_fuzztest("Imagesdr2hdrFuzzTest") { + module_out_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagesdr2hdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagesdr2hdr_fuzzer.cpp", + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixcelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/corpus/init b/test/fuzztest/iamgesdr2hdr_fuzzer/corpus/init new file mode 100644 index 0000000000000000000000000000000000000000..8a4c0f702f037977084c459e8700bd657e860de4 --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/corpus/init @@ -0,0 +1,15 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +FUZZ \ No newline at end of file diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.cpp b/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..cfbae46e46adb97c3826cb510d7e1f4263add4c7 --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.cpp @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "imagesdr2hdr_fuzzer.h" +#include "image_processing.h" +#include "image/pixelmap_native.h" +#include "native_color_space_manager.h" +#include +#include + +using namespace OHOS; +using namespace OHOS::Media; +using namespace OHOS::MediaAVCodec; +using namespace OHOS::MediaAVCodec::Codec; +using namespace std; +namespace { + +} // namespace + +void VDecServerSample::CallBack::OnError(AVCodecErrorType errorType, int32_t errorCode) +{ + cout << "--OnError--" << endl; + tester->isRunning_.store(false); + tester->signal_->inCond_.notify_all(); +} + +void VDecServerSample::CallBack::OnOutputFormatChanged(const Format &format) +{ + tester->GetOutputFormat(); +} + +void VDecServerSample::CallBack::OnInputBufferAvailable(uint32_t index, std::shared_ptr buffer) +{ + unique_lock lock(tester->signal_->inMutex_); + tester->signal_->inIdxQueue_.push(index); + tester->signal_->inBufferQueue_.push(buffer); + tester->signal_->inCond_.notify_all(); +} + +void VDecServerSample::CallBack::OnOutputBufferAvailable(uint32_t index, std::shared_ptr buffer) +{ + tester->codec_->ReleaseOutputBuffer(index); +} + +VDecServerSample::~VDecServerSample() +{ + if (codec_ != nullptr) { + codec_->Stop(); + codec_->Release(); + } + if (signal_ != nullptr) { + delete signal_; + signal_ = nullptr; + } +} + +int32_t VDecServerSample::ConfigServerDecoder() +{ + Format fmt; + fmt.PutIntValue(MediaDescriptionKey::MD_KEY_WIDTH, width); + fmt.PutIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height); + fmt.PutIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, 1); + fmt.PutDoubleValue(MediaDescriptionKey::MD_KEY_FRAME_RATE, frameRate); + fmt.PutIntValue(MediaDescriptionKey::MD_KEY_ROTATION_ANGLE, 0); + return codec_->Configure(fmt); +} + +int32_t VDecServerSample::SetCallback() +{ + shared_ptr cb = make_shared(this); + return codec_->SetCallback(cb); +} + +void VDecServerSample::RunVideoServerDecoder() +{ + codec_ = make_shared("OH.Media.Codec.Decoder.Video.AVC"); + if (codec_ == nullptr) { + cout << "Create failed" << endl; + return; + } + int32_t err = ConfigServerDecoder(); + if (err != AVCS_ERR_OK) { + cout << "ConfigServerDecoder failed" << endl; + return; + } + signal_ = new VDecSignal(); + if (signal_ == nullptr) { + cout << "Failed to new VDecSignal" << endl; + return; + } + err = SetCallback(); + if (err != AVCS_ERR_OK) { + cout << "SetCallback failed" << endl; + return; + } + err = codec_->Start(); + if (err != AVCS_ERR_OK) { + cout << "Start failed" << endl; + return; + } + isRunning_.store(true); + inputLoop_ = 
make_unique(&VDecServerSample::InputFunc, this); + if (inputLoop_ == nullptr) { + cout << "Failed to create input loop" << endl; + isRunning_.store(false); + } +} + +void VDecServerSample::InputFunc() +{ + int32_t time = 1000; + while (sendFrameIndex < frameIndex) { + if (!isRunning_.load()) { + break; + } + unique_lock lock(signal_->inMutex_); + signal_->inCond_.wait_for(lock, std::chrono::milliseconds(time), [this]() { + if (!isRunning_.load()) { + cout << "quit signal" << endl; + return true; + } + return signal_->inIdxQueue_.size() > 0; + }); + if (!isRunning_.load() || signal_->inIdxQueue_.size() == 0) { + break; + } + uint32_t index = signal_->inIdxQueue_.front(); + auto buffer = signal_->inBufferQueue_.front(); + signal_->inIdxQueue_.pop(); + signal_->inBufferQueue_.pop(); + lock.unlock(); + if (buffer->memory_ == nullptr) { + isRunning_.store(false); + break; + } + uint8_t *bufferAddr = buffer->memory_->GetAddr(); + if (memcpy_s(bufferAddr, buffer->memory_->GetCapacity(), fuzzData, fuzzSize) != EOK) { + break; + } + int32_t err = codec_->QueueInputBuffer(index); + if (err != AVCS_ERR_OK) { + cout << "QueueInputBuffer fail" << endl; + break; + } + sendFrameIndex++; + } +} + +void VDecServerSample::WaitForEos() +{ + if (inputLoop_ && inputLoop_->joinable()) { + inputLoop_->join(); + } +} + +void VDecServerSample::GetOutputFormat() +{ + Format fmt; + int32_t err = codec_->GetOutputFormat(fmt); + if (err != AVCS_ERR_OK) { + cout << "GetOutputFormat fail" << endl; + isRunning_.store(false); + signal_->inCond_.notify_all(); + } +} + +void VDecServerSample::Flush() +{ + int32_t err = codec_->Flush(); + if (err != AVCS_ERR_OK) { + cout << "Flush fail" << endl; + isRunning_.store(false); + signal_->inCond_.notify_all(); + } +} + +void VDecServerSample::Reset() +{ + int32_t err = codec_->Reset(); + if (err != AVCS_ERR_OK) { + cout << "Reset fail" << endl; + isRunning_.store(false); + signal_->inCond_.notify_all(); + } +} \ No newline at end of file diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.h b/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..7dd0768133033b5067241548e75cb4e657137375 --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/imagesdr2hdr_fuzzer.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef IMAGE_SDR2HDR_FUZZER_HEADER +#define IMAGE_SDR2HDR_FUZZER_HEADER + +#include +#include +#include +#include +#include +#include + +#endif diff --git a/test/fuzztest/iamgesdr2hdr_fuzzer/project.xml b/test/fuzztest/iamgesdr2hdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..66e1dcac475475fb101b6f8670ec699e6e9696aa --- /dev/null +++ b/test/fuzztest/iamgesdr2hdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/imagedecompose_fuzzer/BUILD.gn b/test/fuzztest/imagedecompose_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..fe7a09eb1ca4cf18106a3ec1046e61b6df5c5133 --- /dev/null +++ b/test/fuzztest/imagedecompose_fuzzer/BUILD.gn @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("ImagedecomposeFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagedecompose_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagedecompose_fuzzer.cpp" + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.cpp b/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..30c592dd2ea366dff23fdf9da9df9d288f64cefa --- /dev/null +++ b/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.cpp @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "imagedecompose_fuzzer.h"
+#include "image_processing.h"
+#include "image/pixelmap_native.h"
+#include "native_color_space_manager.h"
+#include
+#include
+
+constexpr uint32_t DEFAULT_WIDTH = 3840;
+constexpr uint32_t DEFAULT_HEIGHT = 2160;
+
+static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format,
+    OH_NativeColorSpaceManager *colorSpaceNative)
+{
+    OH_Pixelmap_InintializationOptions *options = nullptr;
+    (void)OH_Pixelmap_InintializationOptions_Create(&options);
+    (void)OH_Pixelmap_InintializationOptions_SetWidth(options, width);
+    (void)OH_Pixelmap_InintializationOptions_SetHeight(options, height);
+    (void)OH_Pixelmap_InintializationOptions_SetPixelFormat(options, format);
+    (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap);
+    (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative);
+}
+
+bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size)
+{
+    OH_ImageProcessing* imageProcessor = nullptr;
+    OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_DECOMPOSITION);
+    OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(BT2020_PQ);
+    OH_NativeColorSpaceManager *dstColorSpace = OH_NativeColorSpaceManager_CreateForName(SRGB);
+    OH_PixelmapNative *src = nullptr;
+    OH_PixelmapNative *dstPic = nullptr;
+    OH_PixelmapNative *dstGainmap = nullptr;
+    CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102, srcColorSpace);
+    CreatePixelmap(&dstPic, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace);
+    CreatePixelmap(&dstGainmap, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace);
+    OH_PixelmapNative_WritePixels(src, const_cast<uint8_t *>(data), size);
+    bool ret = OH_ImageProcessing_Decompose(imageProcessor, src, dstPic, dstGainmap);
+    OH_PixelmapNative_Release(src);
+    OH_PixelmapNative_Release(dstPic);
+    OH_PixelmapNative_Release(dstGainmap);
+    OH_NativeColorSpaceManager_Destroy(srcColorSpace);
+    OH_NativeColorSpaceManager_Destroy(dstColorSpace);
+    OH_ImageProcessing_Destroy(imageProcessor);
+    return ret;
+}
+
+/* Fuzzer entry point */
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size)
+{
+    /* Run your code on data */
+    DoSomethingInterestingWithMyAPI(data, size);
+    return 0;
+}
\ No newline at end of file
diff --git a/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.h b/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.h
new file mode 100644
index 0000000000000000000000000000000000000000..429bdbc326b234092410290a32161e4aaf01df14
--- /dev/null
+++ b/test/fuzztest/imagedecompose_fuzzer/imagedecompose_fuzzer.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2025 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef IMAGE_DECOMPOSE_FUZZER_HEADER +#define IMAGE_DECOMPOSE_FUZZER_HEADER + +#include +#include +#include +#include +#include +#include + +#endif \ No newline at end of file diff --git a/test/fuzztest/imagedecompose_fuzzer/project.xml b/test/fuzztest/imagedecompose_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/imagedecompose_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/imagehdr2sdr_fuzzer/BUILD.gn b/test/fuzztest/imagehdr2sdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..678ef55cab2884a9457caf9efb6500f14e18e87c --- /dev/null +++ b/test/fuzztest/imagehdr2sdr_fuzzer/BUILD.gn @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("Imagehdr2sdrFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagehdr2sdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagehdr2sdr_fuzzer.cpp" + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.cpp b/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c2821361b4d9bcecaf287606d881f4b31598e863 --- /dev/null +++ b/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.cpp @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "imagehdr2sdr_fuzzer.h"
+#include "image_processing.h"
+#include "image/pixelmap_native.h"
+#include "native_color_space_manager.h"
+#include
+#include
+
+constexpr uint32_t DEFAULT_WIDTH = 3840;
+constexpr uint32_t DEFAULT_HEIGHT = 2160;
+OH_ImageProcessing* imageProcessor = nullptr;
+
+static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format,
+    OH_NativeColorSpaceManager *colorSpaceNative)
+{
+    OH_Pixelmap_InintializationOptions *options = nullptr;
+    (void)OH_Pixelmap_InintializationOptions_Create(&options);
+    (void)OH_Pixelmap_InintializationOptions_SetWidth(options, width);
+    (void)OH_Pixelmap_InintializationOptions_SetHeight(options, height);
+    (void)OH_Pixelmap_InintializationOptions_SetPixelFormat(options, format);
+    (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap);
+    (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative);
+}
+
+bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size)
+{
+    if (imageProcessor == nullptr) {
+        OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION);
+    }
+    OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(BT2020_PQ);
+    OH_NativeColorSpaceManager *dstColorSpace = OH_NativeColorSpaceManager_CreateForName(SRGB);
+    OH_PixelmapNative *src = nullptr;
+    OH_PixelmapNative *dst = nullptr;
+    CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102, srcColorSpace);
+    CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace);
+    OH_PixelmapNative_WritePixels(src, const_cast<uint8_t *>(data), size);
+    bool ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst);
+    OH_PixelmapNative_Release(src);
+    OH_PixelmapNative_Release(dst);
+    OH_NativeColorSpaceManager_Destroy(srcColorSpace);
+    OH_NativeColorSpaceManager_Destroy(dstColorSpace);
+    return ret;
+}
+
+/* Fuzzer entry point */
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size)
+{
+    /* Run your code on data */
+    DoSomethingInterestingWithMyAPI(data, size);
+    return 0;
+}
\ No newline at end of file
diff --git a/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.h b/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.h
new file mode 100644
index 0000000000000000000000000000000000000000..d47275ed50a22b9e2551183b4957a0cca7312be2
--- /dev/null
+++ b/test/fuzztest/imagehdr2sdr_fuzzer/imagehdr2sdr_fuzzer.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2025 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef IMAGE_HDR2HDR_FUZZER_HEADER +#define IMAGE_HDR2HDR_FUZZER_HEADER + +#include +#include +#include +#include +#include +#include + +#endif \ No newline at end of file diff --git a/test/fuzztest/imagehdr2sdr_fuzzer/project.xml b/test/fuzztest/imagehdr2sdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/imagehdr2sdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/imagemetadatagen_fuzzer/BUILD.gn b/test/fuzztest/imagemetadatagen_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..f4f3191dd4718ea77ceab3e42f626345f6ea802d --- /dev/null +++ b/test/fuzztest/imagemetadatagen_fuzzer/BUILD.gn @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("ImagemetadatagenFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagemetadatagen_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagemetadatagen_fuzzer.cpp" + ] + external_deps = [ + "graphic_2d:libnative_color_space_manager", + "image_framework:pixelmap", + "c_utils:utils", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.cpp b/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..89a7e5739fecea490879ec20c65d78180366ffb1 --- /dev/null +++ b/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.cpp @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "imagemetadatagen_fuzzer.h"
+#include "image_processing.h"
+#include "image/pixelmap_native.h"
+#include "native_color_space_manager.h"
+#include
+#include
+
+constexpr uint32_t DEFAULT_WIDTH = 3840;
+constexpr uint32_t DEFAULT_HEIGHT = 2160;
+OH_ImageProcessing* imageProcessor = nullptr;
+
+static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format,
+    OH_NativeColorSpaceManager *colorSpaceNative)
+{
+    OH_Pixelmap_InintializationOptions *options = nullptr;
+    (void)OH_Pixelmap_InintializationOptions_Create(&options);
+    (void)OH_Pixelmap_InintializationOptions_SetWidth(options, width);
+    (void)OH_Pixelmap_InintializationOptions_SetHeight(options, height);
+    (void)OH_Pixelmap_InintializationOptions_SetPixelFormat(options, format);
+    (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap);
+    (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative);
+}
+
+bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size)
+{
+    if (imageProcessor == nullptr) {
+        OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_METADATA_GENERATION);
+    }
+    OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(BT2020_PQ);
+    OH_PixelmapNative *src = nullptr;
+    CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_1010102, srcColorSpace);
+    OH_PixelmapNative_WritePixels(src, const_cast<uint8_t *>(data), size);
+    bool ret = OH_ImageProcessing_GenerateMetadata(imageProcessor, src);
+    OH_PixelmapNative_Release(src);
+    OH_NativeColorSpaceManager_Destroy(srcColorSpace);
+    return ret;
+}
+
+/* Fuzzer entry point */
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size)
+{
+    /* Run your code on data */
+    DoSomethingInterestingWithMyAPI(data, size);
+    return 0;
+}
\ No newline at end of file
diff --git a/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.h b/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.h
new file mode 100644
index 0000000000000000000000000000000000000000..95de34a88dfb0110cd92e517eb1ddb32af2e1a0e
--- /dev/null
+++ b/test/fuzztest/imagemetadatagen_fuzzer/imagemetadatagen_fuzzer.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2025 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef IMAGE_METADATA_GEN_FUZZER_HEADER +#define IMAGE_METADATA_GEN_FUZZER_HEADER + +#include +#include +#include +#include +#include +#include + +#endif \ No newline at end of file diff --git a/test/fuzztest/imagemetadatagen_fuzzer/project.xml b/test/fuzztest/imagemetadatagen_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/imagemetadatagen_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/imagesdr2sdr_fuzzer/BUILD.gn b/test/fuzztest/imagesdr2sdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..79590b56803e4a489b73a301f658cce89a1bb2a1 --- /dev/null +++ b/test/fuzztest/imagesdr2sdr_fuzzer/BUILD.gn @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("Imagesdr2sdrFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/imagesdr2sdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "imagesdr2sdr_fuzzer.cpp" + ] + external_deps = [ + "c_utils:utils", + "graphic_2d:libnative_color_space_manager", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "image_framework:pixelmap", + "image_framework:pixelmap_ndk", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.cpp b/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..fb95c68879eb5e713a5388097b9ea44fb0dc2107 --- /dev/null +++ b/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.cpp @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "imagesdr2sdr_fuzzer.h"
+#include "image_processing.h"
+#include "image/pixelmap_native.h"
+#include "native_color_space_manager.h"
+#include
+#include
+
+constexpr uint32_t DEFAULT_WIDTH = 3840;
+constexpr uint32_t DEFAULT_HEIGHT = 2160;
+OH_ImageProcessing* imageProcessor = nullptr;
+
+static void CreatePixelmap(OH_PixelmapNative **pixelmap, int32_t width, int32_t height, int format,
+    OH_NativeColorSpaceManager *colorSpaceNative)
+{
+    OH_Pixelmap_InintializationOptions *options = nullptr;
+    (void)OH_Pixelmap_InintializationOptions_Create(&options);
+    (void)OH_Pixelmap_InintializationOptions_SetWidth(options, width);
+    (void)OH_Pixelmap_InintializationOptions_SetHeight(options, height);
+    (void)OH_Pixelmap_InintializationOptions_SetPixelFormat(options, format);
+    (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelmap);
+    (void)OH_PixelmapNative_SetColorSpaceNative(*pixelmap, colorSpaceNative);
+}
+
+bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size)
+{
+    if (imageProcessor == nullptr) {
+        OH_ImageProcessing_Create(&imageProcessor, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION);
+    }
+    OH_NativeColorSpaceManager *srcColorSpace = OH_NativeColorSpaceManager_CreateForName(SRGB);
+    OH_NativeColorSpaceManager *dstColorSpace = OH_NativeColorSpaceManager_CreateForName(DISPLAY_P3);
+    OH_PixelmapNative *src = nullptr;
+    OH_PixelmapNative *dst = nullptr;
+    CreatePixelmap(&src, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, srcColorSpace);
+    CreatePixelmap(&dst, DEFAULT_WIDTH, DEFAULT_HEIGHT, PIXEL_FORMAT_RGBA_8888, dstColorSpace);
+    OH_PixelmapNative_WritePixels(src, const_cast<uint8_t *>(data), size);
+    bool ret = OH_ImageProcessing_ConvertColorSpace(imageProcessor, src, dst);
+    OH_PixelmapNative_Release(src);
+    OH_PixelmapNative_Release(dst);
+    OH_NativeColorSpaceManager_Destroy(srcColorSpace);
+    OH_NativeColorSpaceManager_Destroy(dstColorSpace);
+    return ret;
+}
+
+/* Fuzzer entry point */
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size)
+{
+    /* Run your code on data */
+    DoSomethingInterestingWithMyAPI(data, size);
+    return 0;
+}
\ No newline at end of file
diff --git a/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.h b/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.h
new file mode 100644
index 0000000000000000000000000000000000000000..657458bc6c300ee97e9607a4f0ae0650239950af
--- /dev/null
+++ b/test/fuzztest/imagesdr2sdr_fuzzer/imagesdr2sdr_fuzzer.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2025 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef IMAGE_SDR2SDR_FUZZER_HEADER +#define IMAGE_SDR2SDR_FUZZER_HEADER + +#include +#include +#include +#include +#include +#include + +#endif \ No newline at end of file diff --git a/test/fuzztest/imagesdr2sdr_fuzzer/project.xml b/test/fuzztest/imagesdr2sdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..500f6f414493290025d8dea9b066e5d5021a9377 --- /dev/null +++ b/test/fuzztest/imagesdr2sdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + \ No newline at end of file diff --git a/test/fuzztest/resources/ohos_test.xml b/test/fuzztest/resources/ohos_test.xml new file mode 100644 index 0000000000000000000000000000000000000000..68cdede6b585f311c03c0a5040c4261c54a2b310 --- /dev/null +++ b/test/fuzztest/resources/ohos_test.xml @@ -0,0 +1,25 @@ + + + + + + + + + diff --git a/test/fuzztest/videohdrtohdr_fuzzer/BUILD.gn b/test/fuzztest/videohdrtohdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..85c92d6b64afecb2a6267da570c43bc24991afd5 --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/BUILD.gn @@ -0,0 +1,52 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#####################hydra-fuzz################### +import("//build/config/features.gni") +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") +module_output_path = "video_processing_engine/fuzztest" +##############################fuzztest########################################## +ohos_fuzztest("VideohdrtohdrFuzzTest") { + module_out_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videohdrtohdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/interfaces/kits" + ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "videohdrtohdr_fuzzer.cpp", + "videodec_sample.cpp", + ] + + external_deps = [ + "c_utils:utils", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "graphic_surface:sync_fence", + "hilog:libhilog", + "ipc:ipc_single", + "media_foundation:media_foundation", + "media_foundation:native_media_core", + "window_manager:libwm", + ] + deps = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework:video_processing" + ] +} \ No newline at end of file diff --git a/test/fuzztest/videohdrtohdr_fuzzer/project.xml b/test/fuzztest/videohdrtohdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/videohdrtohdr_fuzzer/video_sample.cpp b/test/fuzztest/videohdrtohdr_fuzzer/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4e66a8a148101c27bb20fc31840572a11d86b585 --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/video_sample.cpp @@ 
-0,0 +1,177 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "video_sample.h" +#include "securec.h" +#include "sync_fence.h" + +using namespace OHOS; +using namespace std; + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs) : cs(cs) {}; + ~VPEConsumerListener() {} + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + int32_t fence = -1; + sptr cs {nullptr}; +}; + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); 
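+    // inWindow obtained above is the processor's input surface; InputFunc() copies the fuzz payload into buffers requested from it.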
+ CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrtohdr_fuzzer/video_sample.h b/test/fuzztest/videohdrtohdr_fuzzer/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..52b3fa7246a501385ab5072b1a2c7e1206d0dec6 --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/video_sample.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener{ +public: + explicit VPEConsumerListener(sptr cs) : cs(cs){}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + 
OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.cpp b/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..39f94a0b438c5322d0a9d5c7351cd955594ff484 --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.cpp @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include +#include +#include +#include + +#include "video_sample.h" + +using namespace std; +using namespace OHOS; +using namespace OHOS::Media; + +#define FUZZ_PROJECT_NAME "videohdrtohdr_fuzzer" +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; + +namespace OHOS { + VideoSample *sample = nullptr; + bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) + { + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); + } +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.h b/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..eacce3d342c01a38a524da56da5448c7d2512a3e --- /dev/null +++ b/test/fuzztest/videohdrtohdr_fuzzer/videohdrtohdr_fuzzer.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_HDRTOHDR_FUZZER_HEADER +#define VIDEO_HDRTOHDR_FUZZER_HEADER + +#include +#include +#include +#include +#include +#include + +#endif \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/BUILD.gn b/test/fuzztest/videohdrvividtohdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..37a74722da34443ebecd88d2952a0f3a72c3d1a3 --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/BUILD.gn @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("VideohdrvividtohdrFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videohdrvividtohdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "videohdrvividtohdr_fuzzer.cpp", + "video_sample.cpp" + ] + external_deps = [ + "c_utils:utils", + "hilog:libhilog", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/resources/ohos_test.xml" +} \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/project.xml b/test/fuzztest/videohdrvividtohdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.cpp b/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a9b4619772fd9da61f08a8f774b4d5cb72f103f8 --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.cpp @@ -0,0 +1,200 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include "video_sample.h" +#include "securec.h" +#include "sync_fence.h" + +using namespace OHOS; +using namespace std; + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; +constexpr int64_t SLEEP_MICROSECONDS = 33333L; +constexpr int THREE = 3; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs) : cs(cs) {}; + ~VPEConsumerListener() {} + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + int32_t fence = -1; + sptr cs {nullptr}; +}; + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, 
callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + int32_t err = 0; + // Load the HDR Vivid dynamic metadata once; InputFunc() attaches it to every input buffer. + metaDataFile = std::make_unique<std::ifstream>(inputMetaPath); + metaDataFile->seekg(0, ios::end); + metadataSize = metaDataFile->tellg(); + metaDataFile->seekg(0, ios::beg); + if (metadataSize > 0) { + metaData = new uint8_t[metadataSize]; + metaDataFile->read(reinterpret_cast<char *>(metaData), metadataSize); + } else { + cout << "invalid metadata size" << endl; + metaData = nullptr; + return; + } + uint8_t val = OH_VIDEO_HDR_VIVID; + err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + if (err != 0) { + cout << "set metadata type failed" << endl; + } + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t ret = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_DYNAMIC_METADATA, metadataSize, metaData); + CHECK_AND_RETURN_RET(ret == 0, ret, "set metadata value failed"); + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + // Copy the fuzzer-provided bytes into the mapped buffer, then flush it to the processor's input surface. + uint8_t *addr = reinterpret_cast<uint8_t *>(virAddr); + memcpy_s(addr, config.stride * config.height * THREE, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + usleep(SLEEP_MICROSECONDS); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.h b/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..52b3fa7246a501385ab5072b1a2c7e1206d0dec6 --- /dev/null +++ 
b/test/fuzztest/videohdrvividtohdr_fuzzer/video_sample.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener{ +public: + explicit VPEConsumerListener(sptr cs) : cs(cs){}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, 
"OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.cpp b/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..69b1da96de782bd5bd3ea04492d76b52c28eb129 --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.cpp @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "videohdrvividtohdr_fuzzer.h" +#include "video_sample.h" +#include +#include + +namespace OHOS { +VideoSample *sample = nullptr; +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); +} +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.h b/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..648fe19204f747a3461781e7e8649788b62f2640 --- /dev/null +++ b/test/fuzztest/videohdrvividtohdr_fuzzer/videohdrvividtohdr_fuzzer.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_HDRVIVID_TO_HDR_FUZZERR_HEADER +#define VIDEO_HDRVIVID_TO_HDR_FUZZERR_HEADER + +#include +#include +#include +#include +#include +#include + +#endif \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/BUILD.gn b/test/fuzztest/videohdrvividtosdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..554fc0ee58f7bb74c5933b70e5b932b7bf312e03 --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/BUILD.gn @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("VideohdrvividtohdrFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videohdrvividtosdr_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "videohdrvividtosdr_fuzzer.cpp", + "video_sample.cpp" + ] + external_deps = [ + "c_utils:utils", + "hilog:libhilog", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/resources/ohos_test.xml" +} \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/project.xml b/test/fuzztest/videohdrvividtosdr_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.cpp b/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4e29e1ebdf5b029c30b3e9f4c4f84b18feae4a66 --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.cpp @@ -0,0 +1,191 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include "video_sample.h" +#include "securec.h" +#include "sync_fence.h" + +using namespace OHOS; +using namespace std; + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs) : cs(cs) {}; + ~VPEConsumerListener() {} + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + int32_t fence = -1; + sptr cs {nullptr}; +}; + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, 
ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + int32_t err = 0; + metaDataFile = std::make_unique(inputMetaPath); + metaDataFile->seekg(0, ios::end); + metadataSize = metaDataFile->tellg(); + metaDataFile->seekg(0, ios::beg); + metaData = new uint8_t[metadataSize]; + metaDataFile->read(reinterpret_cast(metaData), metadataSize); + uint8_t val = OH_VIDEO_HDR_VIVID; + err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + if (err != 0) { + cout << "set metadata type failed" << endl; + } + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t ret = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_DYNAMIC_METADATA, metadataSize, metaData); + CHECK_AND_RETURN_RET(ret == 0, ret, "set metadata value failed"); + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.h b/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..52b3fa7246a501385ab5072b1a2c7e1206d0dec6 --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/video_sample.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener{ +public: + explicit VPEConsumerListener(sptr cs) : cs(cs){}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + 
CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.cpp b/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..1c27a90f9cd953c8801833b8fbe62dddac5f062e --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.cpp @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "videohdrvividtosdr_fuzzer.h" +#include "video_sample.h" +#include +#include + +namespace OHOS { +VideoSample *sample = nullptr; +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); +} +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.h b/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..1f2a2ba397f313a7e05500f613ad812d399cf993 --- /dev/null +++ b/test/fuzztest/videohdrvividtosdr_fuzzer/videohdrvividtosdr_fuzzer.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_HDRVIVID_TO_SDR_FUZZER_HEADER +#define VIDEO_HDRVIVID_TO_SDR_FUZZER_HEADER + +#include +#include +#include +#include +#include +#include + +#endif \ No newline at end of file diff --git a/test/fuzztest/videometadatagen_fuzzer/BUILD.gn b/test/fuzztest/videometadatagen_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..e50c615f92a771b94025467e7978351d71158d21 --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/BUILD.gn @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#####################hydra_fuzz###################### +import("//build/config/features.gni") +import("//build/test.gni") +module_output_path = "video_processing_engine/fuzztest" +############################fuzztest############################# +ohos_fuzztest("VideometadatagenFuzzTest") { + module_output_path = module_output_path + fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videometadatagen_fuzzer" + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c" + ] + include_dirs += [ "./" ] + cflags = [ + "-g", + "-O0", + "-Wno-unused-variable", + "-fno-omit-frame-pointer", + ] + sources = [ + "videometadatagen_fuzzer.cpp", + "video_sample.cpp" + ] + external_deps = [ + "c_utils:utils", + "hilog:libhilog", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "ipc:ipc_single", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "window_manager:libwm", + ] + deps = [ + "$FRAMEWORK_DIR:image_processing" + ] + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/resources/ohos_test.xml" +} \ No newline at end of file diff --git a/test/fuzztest/videometadatagen_fuzzer/project.xml b/test/fuzztest/videometadatagen_fuzzer/project.xml new file mode 100644 index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/project.xml @@ -0,0 +1,25 @@ + + + + + + 1000 + + 300 + + 4096 + + diff --git a/test/fuzztest/videometadatagen_fuzzer/video_sample.cpp b/test/fuzztest/videometadatagen_fuzzer/video_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ae37b1fe1b38bf97c4e9d2386f48354288e6885e --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/video_sample.cpp @@ -0,0 +1,187 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include "video_sample.h" +#include "video_processing_types.h" +#include "securec.h" +#include "sync_fence.h" + +using namespace OHOS; +using namespace std; + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + g_state = state; + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + g_Cond.notify_all(); + } + std::cout << "OnState callback called, new state is "<< state << std::endl; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs) : cs(cs) {}; + ~VPEConsumerListener() {} + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } + +private: + int64_t timestamp = 0; + int32_t fence = -1; + sptr cs {nullptr}; +}; + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + uint8_t val = OH_VIDEO_HDR_VIVID; + ret = OH_NativeWindow_SetMetadataValue(outWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + if (ret != 0) { + cout << "set metadata failed" << endl; + } + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + 
OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + int32_t err = 0; + uint8_t val = OH_VIDEO_HDR_VIVID; + err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + if (err != 0) { + cout << "set metadata type failed" << endl; + } + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videometadatagen_fuzzer/video_sample.h b/test/fuzztest/videometadatagen_fuzzer/video_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..06135e9bc96555d599ef8a5378f98ff5e2504959 --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/video_sample.h @@ -0,0 +1,173 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; +constexpr int64_t SLEEP_MICROSECONDS = 33333L; +constexpr int THREE = 3; + +static int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} + +class VPEConsumerListener : public IBufferConsumerListener { +public: + explicit VPEConsumerListener(sptr cs)(cs) {}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, 
"OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * THREE, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(SLEEP_MICROSECONDS); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/test/fuzztest/videometadatagen_fuzzer/videometadatagen_fuzzer.cpp b/test/fuzztest/videometadatagen_fuzzer/videometadatagen_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..262b97982c1cd0e6eb6ab92c2ef54de7cf3e7e65 --- /dev/null +++ b/test/fuzztest/videometadatagen_fuzzer/videometadatagen_fuzzer.cpp @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include +#include "video_sample.h" + +namespace OHOS { +VideoSample *sample = nullptr; +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); +} +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videosdrtosdr_fuzzer/BUILD.gn b/test/fuzztest/videosdrtosdr_fuzzer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..3b444c00bd2b750601442b7f220ea6249749a3ff --- /dev/null +++ b/test/fuzztest/videosdrtosdr_fuzzer/BUILD.gn @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#####################hydra_fuzz######################
+import("//build/config/features.gni")
+import("//build/test.gni")
+module_output_path = "video_processing_engine/fuzztest"
+############################fuzztest#############################
+ohos_fuzztest("VideosdrtosdrFuzzTest") {
+  module_output_path = module_output_path
+  fuzz_config_file = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/videosdrtosdr_fuzzer"
+  include_dirs = [
+    "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/video_processing_engine/interface/kits/c"
+  ]
+  include_dirs += [ "./" ]
+  cflags = [
+    "-g",
+    "-O0",
+    "-Wno-unused-variable",
+    "-fno-omit-frame-pointer",
+  ]
+  sources = [
+    "videosdrtosdr_fuzzer.cpp",
+    "video_sample.cpp"
+  ]
+  external_deps = [
+    "c_utils:utils",
+    "hilog:libhilog",
+    "graphic_2d:libgraphic_utils",
+    "graphic_2d:librender_service_client",
+    "graphic_surface:surface",
+    "ipc:ipc_single",
+    "media_foundation:native_media_core",
+    "media_foundation:media_foundation",
+    "window_manager:libwm",
+  ]
+  deps = [
+    "$FRAMEWORK_DIR:image_processing"
+  ]
+  resource_config_file =
+      "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/fuzztest/resources/ohos_test.xml"
+}
\ No newline at end of file
diff --git a/test/fuzztest/videosdrtosdr_fuzzer/project.xml b/test/fuzztest/videosdrtosdr_fuzzer/project.xml
new file mode 100644
index 0000000000000000000000000000000000000000..85e7ef2c1cc6471e288306f6e3dcea5287a78b0e
--- /dev/null
+++ b/test/fuzztest/videosdrtosdr_fuzzer/project.xml
@@ -0,0 +1,25 @@
+<!-- Copyright (c) 2025 Huawei Device Co., Ltd.
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+         http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<fuzz_config>
+  <fuzztest>
+    <!-- maximum length of a test input -->
+    <max_len>1000</max_len>
+    <!-- maximum total time in seconds to run the fuzzer -->
+    <max_total_time>300</max_total_time>
+    <!-- memory usage limit in Mb -->
+    <rss_limit_mb>4096</rss_limit_mb>
+  </fuzztest>
+</fuzz_config>
diff --git a/test/fuzztest/videosdrtosdr_fuzzer/video_sample.cpp b/test/fuzztest/videosdrtosdr_fuzzer/video_sample.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..c4d6d0fc51346b8c036fdb87ebbe2221d805736b
--- /dev/null
+++ b/test/fuzztest/videosdrtosdr_fuzzer/video_sample.cpp
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2025 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <condition_variable>
+#include <cstdint>
+#include <ctime>
+#include <iostream>
+#include <unistd.h>
+#include "video_sample.h"
+#include "securec.h"
+#include "sync_fence.h"
+
+using namespace OHOS;
+using namespace std;
+
+constexpr int64_t NANOS_IN_SECOND = 1000000000L;
+constexpr int64_t NANOS_IN_MICRO = 1000L;
+
+// Written by the OnState callback; g_Cond is signalled once processing stops.
+static VideoProcessing_State g_state = VIDEO_PROCESSING_STATE_STOPPED;
+static std::condition_variable g_Cond;
+
+static int64_t GetSystemTimeUs()
+{
+    struct timespec now;
+    (void)clock_gettime(CLOCK_BOOTTIME, &now);
+    int64_t nanoTime = static_cast<int64_t>(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec;
+    return nanoTime / NANOS_IN_MICRO;
+}
+
+static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData)
+{
+    (void)videoProcessor;
+    (void)error;
+    (void)userData;
+}
+
+static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData)
+{
+    g_state = state;
+    if (state == VIDEO_PROCESSING_STATE_STOPPED) {
+        g_Cond.notify_all();
+    }
+    std::cout << "OnState callback called, new state is " << state << std::endl;
+}
+
+static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData)
+{
+    OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index);
+}
+
+// Consumer listener that immediately acquires and releases output buffers so the
+// output surface never blocks the processing pipeline.
+class VPEConsumerListener : public IBufferConsumerListener {
+public:
+    explicit VPEConsumerListener(sptr<Surface> cs) : cs(cs) {}
+    ~VPEConsumerListener() {}
+    void OnBufferAvailable() override
+    {
+        sptr<SurfaceBuffer> buffer;
+        Rect damage = {};
+        cs->AcquireBuffer(buffer, fence, timestamp, damage);
+        cs->ReleaseBuffer(buffer, -1);
+    }
+
+private:
+    int64_t timestamp = 0;
+    int32_t fence = -1;
+    sptr<Surface> cs {nullptr};
+};
+
+VideoSample::~VideoSample()
+{
+    if (callback) {
+        OH_VideoProcessingCallback_Destroy(callback);
+        callback = nullptr;
+    }
+    if (rect) {
+        delete rect;
+        rect = nullptr;
+    }
+    OH_VideoProcessing_Destroy(videoProcessor);
+    OH_NativeWindow_DestroyNativeWindow(outWindow);
+    OH_NativeWindow_DestroyNativeWindow(inWindow);
+    if (cs) {
+        cs->UnregisterConsumerListener();
+    }
+}
+
+int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param)
+{
+    width_ = width;
+    height_ = height;
+    isRunning = true;
+    param_ = param;
+    int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION);
+    CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed.");
+    cs = Surface::CreateSurfaceAsConsumer();
+    sptr<IBufferConsumerListener> listener = new VPEConsumerListener(cs);
+    cs->RegisterConsumerListener(listener);
+    auto p = cs->GetProducer();
+    sptr<Surface> ps = Surface::CreateSurfaceAsProducer(p);
+    outWindow = CreateNativeWindowFromSurface(&ps);
+    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt);
+    ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace);
+
+    ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow);
+    CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed.");
+    ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow);
+    CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed.");
+    SetInputWindowParam();
+    ret = OH_VideoProcessingCallback_Create(&callback);
+    CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed.");
+    OH_VideoProcessingCallback_BindOnError(callback, OnError);
+    OH_VideoProcessingCallback_BindOnState(callback, OnState);
+    OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer);
+    ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this);
+    CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS,
+        ret, "OH_VideoProcessing_RegisterCallback failed.");
+    return VIDEO_PROCESSING_SUCCESS;
+}
+
+void VideoSample::SetInputWindowParam()
+{
+    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_);
+    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE,
+        NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE |
+        NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER);
+    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt);
+    OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace);
+    rect = new Region::Rect();
+    rect->x = 0;
+    rect->y = 0;
+    rect->w = width_;
+    rect->h = height_;
+    region.rects = rect;
+}
+
+int32_t VideoSample::InputFunc(const uint8_t *data, size_t size)
+{
+    int fenceFd = -1;
+    OHNativeWindowBuffer *ohNativeWindowBuffer;
+    int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd);
+    CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed.");
+    if (fenceFd > 0) {
+        close(fenceFd);
+    }
+    OH_NativeBuffer *nativeBuffer = nullptr;
+    err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer);
+    CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed.");
+    void *virAddr = nullptr;
+    OH_NativeBuffer_Config config;
+    OH_NativeBuffer_GetConfig(nativeBuffer, &config);
+    err = OH_NativeBuffer_Map(nativeBuffer, &virAddr);
+    CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed.");
+    uint8_t *addr = reinterpret_cast<uint8_t *>(virAddr);
+    // YUV 4:2:0 frame: stride * height for luma plus half of that again for chroma.
+    const size_t bufferSize = config.stride * config.height * 3 / 2;
+    memcpy_s(addr, bufferSize, data, size);
+    NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs());
+    err = OH_NativeBuffer_Unmap(nativeBuffer);
+    CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed.");
+    err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region);
+    CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed.");
+    err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace);
+    CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_SetColorSpace failed.");
+    usleep(33333); // pace input at roughly 30 fps
+    return err;
+}
+
+int32_t VideoSample::StartProcess()
+{
+    int32_t ret = OH_VideoProcessing_Start(videoProcessor);
+    CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed.");
+    return VIDEO_PROCESSING_SUCCESS;
+}
diff --git a/test/fuzztest/videosdrtosdr_fuzzer/video_sample.h b/test/fuzztest/videosdrtosdr_fuzzer/video_sample.h
new file mode 100644
index 0000000000000000000000000000000000000000..7ff3d0f5581574c46d7bf260f777511aa2d00e5f
--- /dev/null
+++ b/test/fuzztest/videosdrtosdr_fuzzer/video_sample.h
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2025 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef VIDEO_SDR2SDR_SAMPLE_H +#define VIDEO_SDR2SDR_SAMPLE_H + +#include +#include +#include +#include +#include "securec.h" +#include "sync_fence.h" +#include "video_sample.h" +using namespace OHOS; +using namespace std; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; + +namespace { +int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = static_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + return nanoTime / NANOS_IN_MICRO; +} + +void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData) +{ + (void)videoProcessor; + (void)error; + (void)userData; +} + +void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData) +{ + (void)videoProcessor; + (void)state; + (void)userData; +} + +void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData) +{ + OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index); +} +} // namespace + +class VPEConsumerListener : public IBufferConsumerListener{ +public: + explicit VPEConsumerListener(sptr cs) : cs(cs){}; + ~VPEConsumerListener() {}; + void OnBufferAvailable() override + { + sptr buffer; + Rect damage = {}; + cs->AcquireBuffer(buffer, fence, timestamp, damage); + cs->ReleaseBuffer(buffer, -1); + } +private: + int32_t fence = -1; + int64_t timestamp = 0; + sptr cs {nullptr}; +} + +VideoSample::~VideoSample() +{ + if (callback) { + OH_VideoProcessingCallback_Destroy(callback); + callback = nullptr; + } + if (rect) { + delete rect; + rect = nullptr; + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(outWindow); + OH_NativeWindow_DestroyNativeWindow(inWindow); + if (cs) { + cs->UnregisterConsumerListener(); + } +} + +int32_t VideoSample::InitVideoSample(int32_t width, int32_t height, VideoProcessParam param) +{ + width_ = width; + height_ = height; + isRunning = true; + param_ = param; + + int32_t ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed."); + cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new VPEConsumerListener(cs); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + outWindow = CreateNativeWindowFromSurface(&ps); + (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt); + ret = OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace); + ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed."); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed."); + SetInputWindowParam(); + ret = OH_VideoProcessingCallback_Create(&callback); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed."); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed."); + 
return VIDEO_PROCESSING_SUCCESS; +} + +void VideoSample::SetInputWindowParam() +{ + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE, + NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE | + NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER); + (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt); + OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace); + + rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = width_; + rect->h = height_; + region.rects = rect; +} + +int32_t VideoSample::InputFunc(const uint8_t *data, size_t size) +{ + int fenceFd = -1; + OHNativeWindowBuffer *ohNativeWindowBuffer; + int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed."); + if (fenceFd > 0) { + close(fenceFd); + } + OH_NativeBuffer *nativeBuffer = nullptr; + err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed."); + void *virAddr = nullptr; + OH_NativeBuffer_Config config; + OH_NativeBuffer_GetConfig(nativeBuffer, &config); + err = OH_NativeBuffer_Map(nativeBuffer, &virAddr); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Map failed."); + uint8_t *addr = reinterpret_cast(virAddr); + memcpy_s(addr, config.stride * config.height * 3, data, size); + NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + err = OH_NativeBuffer_Unmap(nativeBuffer); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeBuffer_Unmap failed."); + err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region); + CHECK_AND_RETURN_RET(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed."); + usleep(33333); + return err; +} + +int32_t VideoSample::StartProcess() +{ + int32_t ret = OH_VideoProcessing_Start(videoProcessor); + CHECK_AND_RETURN_RET(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed."); + return VIDEO_PROCESSING_SUCCESS; +} + +#endif \ No newline at end of file diff --git a/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.cpp b/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..1870bfabc4ddc3990c6e56c85a8b4a41290d5f6b --- /dev/null +++ b/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.cpp @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include "video_sample.h" + +namespace OHOS { +VideoSample *sample = nullptr; +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; +bool DoSomethingInterestingWithMyAPI(const uint8_t *data, size_t size) +{ + if (!sample) { + sample = new VideoSample(); + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + sample->InitVideoSample(DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + sample->StartProcess(); + } + return sample->InputFunc(data, size); +} +} + +/* Fuzzer entry point */ +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + /* Run your code on data */ + OHOS::DoSomethingInterestingWithMyAPI(data, size); + return 0; +} \ No newline at end of file diff --git a/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.h b/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.h new file mode 100644 index 0000000000000000000000000000000000000000..0c725d6261156a44e8e47ecbf1e73df8065175a9 --- /dev/null +++ b/test/fuzztest/videosdrtosdr_fuzzer/videosdrtosdr_fuzzer.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_SDR2SDR_FUZZER_H +#define VIDEO_SDR2SDR_FUZZER_H + +#include +#include +#include +#include +#include +#include + +#endif \ No newline at end of file diff --git a/test/nativedemo/vpe_demo2/BUILD.gn b/test/nativedemo/vpe_demo2/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..255c59b0861ffdf45a78a31ff30e71319fcb6f66 --- /dev/null +++ b/test/nativedemo/vpe_demo2/BUILD.gn @@ -0,0 +1,152 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
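+# vpe_video_nativedemo drives the video processing NDK API end to end:
+# demux an input file, decode, run colour-space conversion or metadata
+# generation through OH_VideoProcessing, then either render the result via
+# YuvViewer or re-encode and mux it to an output file.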
+ +import("//build/ohos.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_executable("vpe_video_nativedemo") { + include_dirs = [ + "$CAPI_DIR", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/moduletest/common", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../../graphic/graphic_2d/interfaces/inner_api", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../../window/window_manager/interfaces/innerkits" + ] + + cflags = [ + "-Wall", + "-fno-rtti", + "-fno-exceptions", + "-fno-common", + "-fstack-protector-strong", + "-Wshadow", + "-FPIC", + "-FS", + "-O2", + "-D_FORTIFY_SOURCE=2", + "-fvisibility=hidden", + "-Wformat=2", + "-Wdate-time", + "-Werror", + "-Wextra", + "-Wimplicit-fallthrough", + "-Wsign-compare", + "-Wno-unused-parameter", + "-Wno-deprecated-declarations", + ] + + cflags_cc = cflags + cflags_cc += [ "-std=c++17" ] + + sources = [ + "video_processing_demo.cpp", + "../common/yuv_viewer.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:video_processing" + ] + + external_deps = [ + "c_utils:utils", + "ffmpeg:libohosffmpeg", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "graphic_surface:sync_fence", + "hilog:libhilog", + "ipc:ipc_core", + "media_foundation:media_foundation", + "media_foundation:native_media_core", + "window_manager:libwm", + ] + + install_enable = false + + part_name = "video_processing_engine" + subsystem_name = "multimedia" +} + + + + + + + + + + + + + + +video_demo_native_include_dirs = [ + "$vpe_interface_capi_dir", + "$vpe_capi_root_dir/test/moduletest/common", + "$vpe_capi_root_dir/../../../graphic/graphic_2d/interfaces/inner_api", + "$vpe_capi_root_dir/../../../window/window_manager/interfaces/innerkits" +] + +video_nativedemo_cflags = [ + "-std=c++17", + "-fno-rtti", + "-fno-exceptions", + "-Wall", + "-fno-common", + "-fstack-protector-strong", + "-Wshadow", + "-FPIC", + "-FS", + "-O2", + "-D_FORTIFY_SOURCE=2", + "-fvisibility=hidden", + "-Wformat=2", + "-Wdate-time", + "-Werror", + "-Wextra", + "-Wimplicit-fallthrough", + "-Wsign-compare", + "-Wunused-parameter", +] + +################################################################################################################## +ohos_executable("vpe_video_native_demo") { + include_dirs = video_demo_native_include_dirs + include_dirs += [ "./" ] + cflags = video_nativedemo_cflags + + sources = [ + "video_processing_demo.cpp", + "../common/yuv_viewer.cpp", + ] + + deps = [ + "$vpe_capi_root_dir/framework:video_processing" + ] + + external_deps = [ + "c_utils:utils", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "hilog:libhilog", + "ipc:ipc_core", + "media_foundation:media_foundation", + "media_foundation:native_media_core", + "window_manager:libwm", + ] + + install_enable = false + + part_name = "video_processing_engine" + subsystem_name = "multimedia" +} diff --git a/test/nativedemo/vpe_demo2/video_processing_demo.cpp b/test/nativedemo/vpe_demo2/video_processing_demo.cpp new file mode 100644 index 0000000000000000000000000000000000000000..901b71bb81163d63d4328d2783db9fe8738f016b --- /dev/null +++ b/test/nativedemo/vpe_demo2/video_processing_demo.cpp @@ -0,0 +1,380 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "video_processing_demo.h"
+
+#include <chrono>
+#include <condition_variable>
+#include <fcntl.h>
+#include <iostream>
+#include <memory>
+#include <mutex>
+#include <sys/stat.h>
+#include <unistd.h>
+#include "av_common.h"
+#include "avcodec_common.h"
+#include "avcodec_errors.h"
+#include "native_avcodec_videodecoder.h"
+#include "native_avcodec_videoencoder.h"
+#include "media_description.h"
+#include "native_avformat.h"
+#include "native_avcodec_base.h"
+#include "yuv_viewer.h"
+
+using namespace OHOS;
+using namespace std;
+constexpr int64_t MICRO_IN_SECOND = 1000000L;
+constexpr float FRAME_INTERVAL_TIMES = 1.5;
+constexpr int32_t AUDIO_BUFFER_SIZE = 1024 * 1024;
+constexpr int8_t ARGV_PARAM1 = 1;
+constexpr int8_t ARGV_PARAM2 = 2;
+constexpr int8_t ARGV_PARAM3 = 3;
+constexpr int8_t ARGV_PARAM4 = 4;
+constexpr int8_t ARGV_PARAM5 = 5;
+constexpr int8_t ARGV_PARAM6 = 6;
+constexpr int8_t ARGV_MIN_LENGTH = 7;
+constexpr double DEFAULT_FRAME_RATE = 25.0;
+constexpr std::chrono::seconds STOP_TIMEOUT(10);
+
+static uint32_t g_onErrorCount = 0;
+static VideoProcessing_State g_state = VIDEO_PROCESSING_STATE_STOPPED;
+static std::mutex g_Mutex;
+static std::condition_variable g_Cond;
+
+static int64_t GetFileSize(const char *fileName)
+{
+    int64_t fileSize = 0;
+    if (fileName != nullptr) {
+        struct stat fileStatus {};
+        if (stat(fileName, &fileStatus) == 0) {
+            fileSize = static_cast<int64_t>(fileStatus.st_size);
+        }
+    }
+    return fileSize;
+}
+
+static void OnError(OH_AVCodec *codec, int32_t errorCode, void *userData)
+{
+    (void)codec;
+    (void)userData;
+    cout << "error :" << errorCode << endl;
+}
+
+static void OnDecStreamChanged(OH_AVCodec *codec, OH_AVFormat *format, void *userData)
+{
+    (void)codec;
+    (void)format;
+    (void)userData;
+}
+
+static void OnDecInputBufferAvailable(OH_AVCodec *codec, uint32_t index, OH_AVBuffer *buffer, void *userData)
+{
+    VideoProcessingDemo *demo = static_cast<VideoProcessingDemo *>(userData);
+    OH_AVDemuxer_ReadSampleBuffer(demo->demuxer, demo->videoTrackID, buffer);
+    OH_VideoDecoder_PushInputBuffer(codec, index);
+}
+
+static void OnDecOutputBufferAvailable(OH_AVCodec *codec, uint32_t index, OH_AVBuffer *buffer, void *userData)
+{
+    VideoProcessingDemo *demo = static_cast<VideoProcessingDemo *>(userData);
+    OH_AVCodecBufferAttr attr;
+    OH_AVBuffer_GetBufferAttr(buffer, &attr);
+    if (attr.flags & AVCODEC_BUFFER_FLAGS_EOS) {
+        OH_VideoEncoder_NotifyEndOfStream(demo->enc);
+    }
+    OH_VideoDecoder_RenderOutputBuffer(codec, index);
+}
+
+static void OnEncStreamChanged(OH_AVCodec *codec, OH_AVFormat *format, void *userData)
+{
+    cout << "format changed" << endl;
+}
+
+// Surface-mode encoding: frames reach the encoder through its input surface, so the
+// input-buffer callback has nothing to queue here.
+static void OnEncInputBufferAvailable(OH_AVCodec *codec, uint32_t index, OH_AVBuffer *buffer, void *userData)
+{
+    (void)codec;
+    (void)index;
+    (void)buffer;
+    (void)userData;
+}
+
+static void OnEncOutputBufferAvailable(OH_AVCodec *codec, uint32_t index, OH_AVBuffer *buffer, void *userData)
+{
+    VideoProcessingDemo *demo = static_cast<VideoProcessingDemo *>(userData);
+    OH_AVCodecBufferAttr attr;
+    OH_AVBuffer_GetBufferAttr(buffer, &attr);
+    if (attr.flags & AVCODEC_BUFFER_FLAGS_EOS) {
+        demo->isFinish.store(true);
+        demo->waitCond.notify_all();
+        return;
+    }
+    OH_AVMuxer_WriteSampleBuffer(demo->muxer, 0, buffer);
+    OH_VideoEncoder_FreeOutputBuffer(codec, index);
+}
+
+static void OnVPEError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData)
+{
+    g_onErrorCount++;
+    std::cout << "OnError callback recv errorcode:" << error << std::endl;
+}
+
+static void OnVPEState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData)
+{
+    g_state = state;
+    if (state == VIDEO_PROCESSING_STATE_STOPPED) {
+        g_Cond.notify_all();
+    }
+    std::cout << "OnState callback called, new state is " << state << std::endl;
+}
+
+static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData)
+{
+    VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index);
+ if (ret != VIDEO_PROCESSING_SUCCESS) { + g_onErrorCount++; + std::cout << "Render output buffer failed,errcode: "<< ret << std::endl; + } +} + +VideoProcessingDemo::VideoProcessingDemo(const char *file, VideoProcessing_ColorSpaceInfo inInfo, + VideoProcessing_ColorSpaceInfo outInfo) +{ + inInfo_ = inInfo; + outInfo_ = outInfo; + fd = open(file, O_RDONLY); + outFd = open("./output.mp4", O_CREAT | O_RDWR | O_TRUNC, S_IRUSR | S_IWUSR); + int64_t size = GetFileSize(file); + inSource = OH_AVSource_CreateWithFD(fd, 0, size); + if (!inSource) { + cout << "create source failed" << endl; + } + OH_VideoProcessing_InitializeEnvironment(); + demuxer = OH_AVDemuxer_CreateWithSource(inSource); + muxer = OH_AVMuxer_Create(outFd, AV_OUTPUT_FORMAT_MPEG_4); + if (!muxer || !demuxer) { + cout << "create muxer demuxer failed" << endl; + } + OH_AVFormat *sourceFormat = OH_AVSource_GetSourceFormat(inSource); + OH_AVFormat_GetIntValue(sourceFormat, OH_MD_KEY_TRACK_COUNT, &trackCount); + for (int32_t index = 0; index < trackCount; index++) { + SetTrackFormat(index); + } + OH_AVFormat_Destroy(sourceFormat); +} + +void VideoProcessingDemo::SetTrackFormat(int32_t index) +{ + OH_AVFormat *trackFormat = OH_AVSource_GetTrackFormat(inSource, index); + OH_AVDemuxer_SelectTrackByID(demuxer, index); + int32_t trackType = -1; + OH_AVFormat_GetIntValue(trackFormat, OH_MD_KEY_TRACK_TYPE, &trackType); + if (trackType == MEDIA_TYPE_VID) { + videoTrackID = index; + OH_AVMuxer_AddTrack(muxer, &muxVideoTrackID, trackFormat); + OH_AVFormat_GetIntValue(trackFormat, OH_MD_KEY_TRACK_TYPE, &trackType); + char *mime = nullptr; + OH_AVFormat_GetStringValue(trackFormat, OH_MD_KEY_CODEC_MIME, &mime); + dec = OH_VideoDecoder_CreateByMime(mime); + if (!needRender) { + enc = OH_VideoEncoder_CreateByMime(OH_AVCODEC_MIMETYPE_VIDEO_HEVC); + } + if (isMetadataGen) { + OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + } else { + OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + } + if (!enc || !dec || !videoProcessor) { + cout << "create codec or processor failed" << endl; + return; + } + int32_t isVividIn = false; + OH_AVFormat_GetIntValue(trackFormat, OH_MD_KEY_VIDEO_IS_HDR_VIVID, &isVividIn); + viewer = std::make_unique(); + OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_PIXEL_FORMAT, inInfo_.pixelFormat); + OH_VideoDecoder_Configure(dec, trackFormat); + OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_PIXEL_FORMAT, outInfo_.pixelFormat); + if (outInfo_.colorSpace == OH_COLORSPACE_BT2020_HLG_LIMIT || + outInfo_.colorSpace == OH_COLORSPACE_BT2020_PQ_LIMIT) { + OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_PROFILE, HEVC_PROFILE_MAIN_10); + isHDRVividOut = isVividIn; + } else { + OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_PROFILE, HEVC_PROFILE_MAIN); + } + if (!needRender) { + OH_VideoEncoder_Configure(enc, trackFormat); + } + } else if (trackType == MEDIA_TYPE_AUD) { + audioTrackID = index; + OH_AVMuxer_AddTrack(muxer, &muxAudioTrackID, trackFormat); + } + OH_AVFormat_Destroy(trackFormat); +} + + +VideoProcessingDemo::~VideoProcessingDemo() +{ + OH_VideoProcessing_Destroy(videoProcessor); + OH_VideoProcessing_DeinitializeEnvironment(); + if (dec) { + OH_VideoDecoder_Destroy(dec); + } + if (enc) { + OH_VideoEncoder_Destroy(enc); + } + if (muxer) { + OH_AVMuxer_Destroy(muxer); + } + if (demuxer) { + OH_AVDemuxer_Destroy(demuxer); + } + if (inSource) { + OH_AVSource_Destroy(inSource); + } + close(fd); + close(outFd); +} + +void VideoProcessingDemo::ConfigureCodec() 
+{ + if (!needRender) { + OH_AVCodecCallback encCallback; + encCallback.onError = OnError; + encCallback.onStreamChanged = OnEncStreamChanged; + encCallback.onNeedInputBuffer = OnEncInputBufferAvailable; + encCallback.onNewOutputBuffer = OnEncOutputBufferAvailable; + OH_VideoEncoder_RegisterCallback(enc, encCallback, this); + } + OH_AVCodecCallback decCallback; + decCallback.onError = OnError; + decCallback.onStreamChanged = OnDecStreamChanged; + decCallback.onNeedInputBuffer = OnDecInputBufferAvailable; + decCallback.onNewOutputBuffer = OnDecOutputBufferAvailable; + OH_VideoDecoder_RegisterCallback(dec, decCallback, this); + if (needRender) { + outWindow = viewer->CreateWindow(width_, height_, param.outFmt, param.outColorSpace, isHDRVividOut); + } else { + OH_VideoEncoder_GetSurface(enc, &outWindow); + } + int32_t ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow); + if (ret != VIDEO_PROCESSING_SUCCESS) { + std::cout<<" get vpe input surface failed"<(&VideoProcessingDemo::WriteAudioTrack, this); + } +} + +void VideoProcessingDemo::WaitForEOS() +{ + std::mutex waitMtx; + unique_lock lock(waitMtx); + waitCond.wait(lock, [this]() { + return isFinish.load(); + }); + if (audioThread) { + audioThread->join(); + } + cout << "task finish" << endl; +} + +void VideoProcessingDemo::Stop() +{ + OH_VideoDecoder_Stop(dec); + if (!needRender) { + OH_VideoEncoder_Stop(enc); + } + OH_VideoProcessing_Stop(videoProcessor); + unique_lock lock(g_Mutex); + if (g_Cond.wait_for(lock, STOP_TIMEOUT) == std::cv_status::timeout) { + std::cout << "waiting stop state timeout" << std::endl; + } + OH_AVMuxer_Stop(muxer); +} + + + +int main(int32_t argc, char *argv[]) +{ + if (argc < ARGV_MIN_LENGTH) { + std::cout<< "parameter not enough."<(path, isMetadataGen, inInfo, outInfo); + vpeDemo->needRender = atoi(argv[ARGV_PARAM6]); + vpeDemo->Configure(); + vpeDemo->Start(); + vpeDemo->WaitForEOS(); + vpeDemo->Stop(); + return 0; +} \ No newline at end of file diff --git a/test/nativedemo/vpe_demo2/video_processing_demo.h b/test/nativedemo/vpe_demo2/video_processing_demo.h new file mode 100644 index 0000000000000000000000000000000000000000..48f953bd0453117587641972619fc011145e1ab6 --- /dev/null +++ b/test/nativedemo/vpe_demo2/video_processing_demo.h @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+#ifndef VIDEO_PROCESSING_DEMO_H
+#define VIDEO_PROCESSING_DEMO_H
+
+#include <atomic>
+#include <condition_variable>
+#include <memory>
+#include <thread>
+
+#include "native_avcodec_base.h"
+#include "native_avdemuxer.h"
+#include "native_avmuxer.h"
+#include "native_avsource.h"
+#include "native_avformat.h"
+#include "video_processing.h"
+namespace OHOS {
+class YuvViewer;
+
+class VideoProcessingDemo {
+public:
+    VideoProcessingDemo(const char *file, bool isMetaDataGen,
+        VideoProcessing_ColorSpaceInfo inInfo,
+        VideoProcessing_ColorSpaceInfo outInfo);
+    ~VideoProcessingDemo();
+    void SetTrackFormat(int32_t index);
+    void ConfigureCodec();
+    void ConfigureProcessor();
+    void Start();
+    void WaitForEOS();
+    void Stop();
+    void WriteAudioTrack();
+    OH_AVDemuxer *demuxer = nullptr;
+    OH_AVMuxer *muxer = nullptr;
+    uint32_t videoTrackID = -1;
+    uint32_t audioTrackID = -1;
+    uint32_t muxVideoTrackID = -1;
+    uint32_t muxAudioTrackID = -1;
+    OH_AVCodec *dec = nullptr;
+    OH_AVCodec *enc = nullptr;
+    std::condition_variable waitCond;
+    std::atomic<bool> isFinish {false};
+    uint32_t frameDuration = 0;
+    std::unique_ptr<std::thread> audioThread;
+    bool needRender = false;
+    bool isMetadataGen = false;
+private:
+    int32_t width_ = 0;
+    int32_t height_ = 0;
+    int32_t isHDRVividOut = 0;
+    VideoProcessing_ColorSpaceInfo inInfo_;
+    VideoProcessing_ColorSpaceInfo outInfo_;
+    OH_AVSource *inSource = nullptr;
+    int32_t trackCount = 0;
+    int32_t fd;
+    int32_t outFd;
+    OH_VideoProcessing* videoProcessor = nullptr;
+    VideoProcessing_Callback* callback = nullptr;
+    OHNativeWindow *inWindow = nullptr;
+    OHNativeWindow *outWindow = nullptr;
+    std::unique_ptr<YuvViewer> viewer;
+};
+}
+
+#endif
diff --git a/test/ndk/BUILD.gn b/test/ndk/BUILD.gn
index 529184e0269129e8317611ca13c97d846a25bec1..07807777f20647c822801f41424419be5a0ba8fe 100644
--- a/test/ndk/BUILD.gn
+++ b/test/ndk/BUILD.gn
@@ -12,6 +12,7 @@
 # limitations under the License.
 
 import("//build/ohos.gni")
+import("//foundation/multimedia/media_foundation/video_processing_engine/config.gni")
 
 group("vpe_module_test") {
   testonly = true
diff --git a/test/ndk/moduletest/common/yuv_viewer.cpp b/test/ndk/moduletest/common/yuv_viewer.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..cb377cb393e34b67a2e269ed140b6a652f3f653d
--- /dev/null
+++ b/test/ndk/moduletest/common/yuv_viewer.cpp
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "yuv_viewer.h"
+#include <iostream>
+#include "refbase.h"
+#include "surface/window.h"
+#include "surface.h"
+#include "ui/rs_surface_node.h"
+#include "window_option.h"
+using namespace OHOS;
+
+YuvViewer::~YuvViewer()
+{
+    if (window_) {
+        OH_NativeWindow_DestroyNativeWindow(window_);
+        window_ = nullptr;
+    }
+    if (rosenWindow_) {
+        rosenWindow_->Destroy();
+        rosenWindow_ = nullptr;
+    }
+}
+
+OHNativeWindow *YuvViewer::CreateWindow(uint32_t width, uint32_t height,
+    OH_NativeBuffer_Format pix_fmt, OH_NativeBuffer_ColorSpace color_space, bool isHDRVivid)
+{
+    (void)color_space;
+    (void)isHDRVivid;
+    sptr<Surface> surfaceProducer;
+    sptr<Rosen::WindowOption> option = new Rosen::WindowOption();
+    option->SetWindowType(Rosen::WindowType::WINDOW_TYPE_FLOAT);
+    option->SetWindowMode(Rosen::WindowMode::WINDOW_MODE_FULLSCREEN);
+    rosenWindow_ = Rosen::Window::Create("VpeDemo", option);
+    if (rosenWindow_ == nullptr) {
+        std::cout << "rosen window create failed" << std::endl;
+        return nullptr;
+    }
+    rosenWindow_->SetTurnScreenOn(!rosenWindow_->IsTurnScreenOn());
+    rosenWindow_->SetKeepScreenOn(true);
+    rosenWindow_->Show();
+    surfaceProducer = rosenWindow_->GetSurfaceNode()->GetSurface();
+
+    window_ = CreateNativeWindowFromSurface(&surfaceProducer);
+
+    (void)OH_NativeWindow_NativeWindowHandleOpt(window_, SET_TRANSFORM, 1); // 1: rotation 90°
+    (void)OH_NativeWindow_NativeWindowHandleOpt(window_, SET_BUFFER_GEOMETRY,
+        width, height);
+    (void)OH_NativeWindow_NativeWindowHandleOpt(window_, SET_USAGE,
+        NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE |
+        NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER);
+    (void)OH_NativeWindow_NativeWindowHandleOpt(window_, SET_FORMAT, pix_fmt);
+    return window_;
+}
\ No newline at end of file
diff --git a/test/ndk/moduletest/common/yuv_viewer.h b/test/ndk/moduletest/common/yuv_viewer.h
new file mode 100644
index 0000000000000000000000000000000000000000..a259283cdd8b918ae50d01bfbe5110003022acc1
--- /dev/null
+++ b/test/ndk/moduletest/common/yuv_viewer.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef VPE_TEST_YUV_VIEWER_H +#define VPE_TEST_YUV_VIEWER_H +#include "wm/window.h" +#include "surface/native_image.h" +#include "surface/native_buffer.h" +namespace OHOS { +class YuvViewer{ +public: + ~YuvViewer(); + OHNativeWindow *CreateWindow(uint32_t width, uint32_t height, + OH_NativeBuffer_Format pix_fmt, OH_NativeBuffer_ColorSpace color_space, bool isHDRVivid); + +private: + sptr rosenWindow_; + OHNativeWindow *window_ = nullptr; + +}; +} +#endif // VPE_TEST_YUV_VIEWER_H \ No newline at end of file diff --git a/test/ndk/moduletest/resources/ohos_test.xml b/test/ndk/moduletest/resources/ohos_test.xml new file mode 100644 index 0000000000000000000000000000000000000000..67d74c24ca4fdcbd7b2de2a082d32743cd3710f9 --- /dev/null +++ b/test/ndk/moduletest/resources/ohos_test.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + diff --git a/test/ndk/moduletest/video/BUILD.gn b/test/ndk/moduletest/video/BUILD.gn index 7c907cda74d5334435749db5aa2af9102933d7d6..1085e4b117005fdf79ea2964ea0e2f421642fe42 100644 --- a/test/ndk/moduletest/video/BUILD.gn +++ b/test/ndk/moduletest/video/BUILD.gn @@ -12,15 +12,16 @@ # limitations under the License. import("//build/test.gni") -import("//foundation/multimedia/media_foundation/config.gni") +import("//build/ohos.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +module_output_path = MODULE_TEST_OUTPUT_PATH video_moduletest_native_include_dirs = [ - "$vpe_interface_capi_dir", - "$vpe_capi_root_dir/test/moduletest/common", - "$vpe_capi_root_dir/../../../graphic/graphic_2d/interfaces/inner_api", - "$vpe_capi_root_dir/../../../window/window_manager/interfaces/innerkits", - "$vpe_capi_root_dir/../../av_codec/interfaces/kits/c", - "$vpe_capi_root_dir/../interface/kits/c", + "$CAPI_DIR", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/moduletest/common", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../graphic/graphic_2d/interfaces/inner_api", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../../window/window_manager/interfaces/innerkits" ] video_moduletest_cflags = [ @@ -47,7 +48,7 @@ video_moduletest_cflags = [ ################################################################################################################## ohos_unittest("vpe_video_native_module_test") { - module_out_path = "media_foundation/moduletest" + module_out_path = module_output_path include_dirs = video_moduletest_native_include_dirs include_dirs += [ "./" ] cflags = video_moduletest_cflags @@ -56,15 +57,12 @@ ohos_unittest("vpe_video_native_module_test") { "api_test.cpp", "capability_test.cpp", "func_test.cpp", - "reli_test.cpp", "video_sample.cpp", + "../common/yuv_viewer.cpp", ] deps = [ - "$vpe_capi_root_dir/framework:video_processing", - "$vpe_capi_root_dir/../../av_codec/interfaces/inner_api/native:av_codec_client", - "$vpe_capi_root_dir/../../av_codec/interfaces/kits/c:capi_packages", - "$vpe_capi_root_dir/../../av_codec/services/services:av_codec_service", + "$FRAMEWORK_DIR:video_processing" ] external_deps = [ @@ -78,4 +76,7 @@ ohos_unittest("vpe_video_native_module_test") { "media_foundation:native_media_core", "window_manager:libwm", ] + + resource_config_file = + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/ndk/moduletest/resources/ohos_test.xml" } diff --git a/test/ndk/moduletest/video/api_test.cpp b/test/ndk/moduletest/video/api_test.cpp index 4e8ed3c040e9a1d629bf5c9ea7ec136336f9b5f9..bf4fe93cdccc5c104537931fe8c71296bfa7943b 100644 --- a/test/ndk/moduletest/video/api_test.cpp +++ b/test/ndk/moduletest/video/api_test.cpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024 Huawei 
Device Co., Ltd. + * Copyright (C) 2024 Huawei Device Co., Ltd. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -61,12 +61,12 @@ TestConsumerListener::~TestConsumerListener() {} void TestConsumerListener::OnBufferAvailable() {} -const VideoProcessing_ColorSpaceInfo SRC_INFO = {OH_VIDEO_HDR_VIVID, - OH_COLORSPACE_BT2020_HLG_LIMIT, - NATIVEBUFFER_PIXEL_FMT_YCBCR_P010}; -const VideoProcessing_ColorSpaceInfo DST_INFO = {OH_VIDEO_HDR_VIVID, +const VideoProcessing_ColorSpaceInfo SRC_INFO = {OH_VIDEO_HDR_HDR10, OH_COLORSPACE_BT2020_PQ_LIMIT, - NATIVEBUFFER_PIXEL_FMT_YCBCR_P010}; + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP}; +const VideoProcessing_ColorSpaceInfo DST_INFO = {OH_VIDEO_HDR_HLG, + OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP}; } @@ -119,7 +119,11 @@ static void OnNewOutputBufferCallback(OH_VideoProcessing* videoProcessor, uint32 HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0010, TestSize.Level0) { VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessing_DeinitializeEnvironment(); } @@ -131,7 +135,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0010, TestSize.Level0) HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0020, TestSize.Level0) { VideoProcessing_ErrorCode ret = OH_VideoProcessing_DeinitializeEnvironment(); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + } } /** @@ -142,9 +150,13 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0020, TestSize.Level0) HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0030, TestSize.Level0) { VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessing_DeinitializeEnvironment(); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } } /** @@ -156,7 +168,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0040, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(nullptr, nullptr); - ASSERT_FALSE(ret); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } OH_VideoProcessing_DeinitializeEnvironment(); } @@ -169,7 +183,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0050, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(&SRC_INFO, nullptr); - ASSERT_FALSE(ret); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } } /** @@ -181,7 +197,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0060, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(nullptr, &DST_INFO); - 
ASSERT_FALSE(ret); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } } /** @@ -193,9 +211,7 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0070, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(&SRC_INFO, &DST_INFO); - if (!access("/system/lib64/", 0)) { - ASSERT_TRUE(ret); - } else { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_FALSE(ret); } } @@ -209,7 +225,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0080, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsMetadataGenerationSupported(nullptr); - ASSERT_FALSE(ret); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_FALSE(ret); + } } /** @@ -221,7 +239,7 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0090, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); bool ret = OH_VideoProcessing_IsMetadataGenerationSupported(&SRC_INFO); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_TRUE(ret); } else { ASSERT_FALSE(ret); @@ -238,7 +256,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0100, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); OH_VideoProcessing** videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(videoProcessor, INT_MAX); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } } /** @@ -252,7 +274,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0110, TestSize.Level0) OH_VideoProcessing** videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } } /** @@ -265,7 +291,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0120, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, INT_MAX); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } OH_VideoProcessing_Destroy(videoProcessor); } @@ -280,12 +310,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0130, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OH_VideoProcessing_Destroy(videoProcessor); } else { ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -297,7 +327,9 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0140, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode 
ret = OH_VideoProcessing_Destroy(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -311,12 +343,10 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0150, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Destroy(videoProcessor); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -329,7 +359,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0160, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_RegisterCallback(nullptr, nullptr, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + } } /** @@ -342,10 +376,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0170, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessing_RegisterCallback(nullptr, callback, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); - OH_VideoProcessingCallback_Destroy(callback); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_RegisterCallback(nullptr, callback, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + OH_VideoProcessingCallback_Destroy(callback); + } } /** @@ -359,13 +395,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0180, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_RegisterCallback(videoProcessor, nullptr, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -380,19 +414,15 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0190, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); VideoProcessing_Callback* callback = nullptr; ret = OH_VideoProcessingCallback_Create(&callback); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnError(callback, onErrorEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, nullptr); 
ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); OH_VideoProcessing_Destroy(videoProcessor); OH_VideoProcessingCallback_Destroy(callback); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -407,17 +437,15 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0200, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); VideoProcessing_Callback* callback = nullptr; ret = OH_VideoProcessingCallback_Create(&callback); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); OH_VideoProcessing_Destroy(videoProcessor); OH_VideoProcessingCallback_Destroy(callback); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -432,20 +460,19 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0210, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); VideoProcessing_Callback* callback = nullptr; ret = OH_VideoProcessingCallback_Create(&callback); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); void* userData = &g_userValue; - if (!userData) { - ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, userData); + if (!userData) + { + ret = OH_VideoProcessing_RegisterCallback( videoProcessor, callback, userData); ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); } OH_VideoProcessing_Destroy(videoProcessor); OH_VideoProcessingCallback_Destroy(callback); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -458,7 +485,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0220, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_SetSurface(nullptr, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -478,7 +509,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0230, TestSize.Level0) OHNativeWindow *window = nullptr; window = CreateNativeWindowFromSurface(&ps); VideoProcessing_ErrorCode ret = OH_VideoProcessing_SetSurface(nullptr, window); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } OH_NativeWindow_DestroyNativeWindow(window); } @@ -493,14 +528,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0240, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_SetSurface(videoProcessor, 
nullptr); ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); - OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -514,23 +547,23 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0250, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - sptr cs = Surface::CreateSurfaceAsConsumer(); - sptr listener = new TestConsumerListener( - cs, "/data/test/media/out_320_240_10s.rgba"); - cs->RegisterConsumerListener(listener); - auto p = cs->GetProducer(); - sptr ps = Surface::CreateSurfaceAsProducer(p); - OHNativeWindow *window = nullptr; - window = CreateNativeWindowFromSurface(&ps); - ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OH_VideoProcessing_Destroy(videoProcessor); - OH_NativeWindow_DestroyNativeWindow(window); } else { ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); } /** @@ -542,7 +575,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0260, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_GetSurface(nullptr, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -562,7 +599,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0270, TestSize.Level0) OHNativeWindow *window = nullptr; window = CreateNativeWindowFromSurface(&ps); VideoProcessing_ErrorCode ret = OH_VideoProcessing_GetSurface(nullptr, &window); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } OH_NativeWindow_DestroyNativeWindow(window); } @@ -577,14 +618,14 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0280, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessing_GetSurface(videoProcessor, nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); - OH_VideoProcessing_Destroy(videoProcessor); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_GetSurface(videoProcessor, nullptr); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, 
VIDEO_PROCESSING_ERROR_INVALID_VALUE); } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -598,23 +639,23 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0290, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - sptr cs = Surface::CreateSurfaceAsConsumer(); - sptr listener = new TestConsumerListener( - cs, "/data/test/media/out_320_240_10s.rgba"); - cs->RegisterConsumerListener(listener); - auto p = cs->GetProducer(); - sptr ps = Surface::CreateSurfaceAsProducer(p); - OHNativeWindow *window = nullptr; - window = CreateNativeWindowFromSurface(&ps); - ret = OH_VideoProcessing_GetSurface(videoProcessor, &window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &window); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OH_VideoProcessing_Destroy(videoProcessor); - OH_NativeWindow_DestroyNativeWindow(window); } else { ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); } /** @@ -626,7 +667,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0300, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_Start(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -640,13 +685,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0310, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Start(videoProcessor); ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } } @@ -661,25 +704,23 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0320, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - sptr cs = Surface::CreateSurfaceAsConsumer(); - sptr listener = new TestConsumerListener( - cs, "/data/test/media/out_320_240_10s.rgba"); - cs->RegisterConsumerListener(listener); - auto p = cs->GetProducer(); - sptr ps = Surface::CreateSurfaceAsProducer(p); - OHNativeWindow *window = nullptr; - window = CreateNativeWindowFromSurface(&ps); - ret = 
OH_VideoProcessing_SetSurface(videoProcessor, window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Start(videoProcessor); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); - OH_VideoProcessing_Destroy(videoProcessor); - OH_NativeWindow_DestroyNativeWindow(window); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); } /** @@ -691,7 +732,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0330, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_Stop(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -705,14 +750,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0340, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Stop(videoProcessor); ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); - OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -726,30 +769,25 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0350, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - sptr cs = Surface::CreateSurfaceAsConsumer(); - sptr listener = new TestConsumerListener( - cs, "/data/test/media/out_320_240_10s.rgba"); - cs->RegisterConsumerListener(listener); - auto p = cs->GetProducer(); - sptr ps = Surface::CreateSurfaceAsProducer(p); - OHNativeWindow *window = nullptr; - window = CreateNativeWindowFromSurface(&ps); - ret = OH_VideoProcessing_SetSurface(videoProcessor, window); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OHNativeWindow *outwindow = nullptr; - ret = OH_VideoProcessing_GetSurface(videoProcessor, &outwindow); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + if 
(!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Start(videoProcessor); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); ret = OH_VideoProcessing_Stop(videoProcessor); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); - OH_VideoProcessing_Destroy(videoProcessor); - OH_NativeWindow_DestroyNativeWindow(window); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); } + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); } /** @@ -761,7 +799,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0360, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(nullptr, g_index); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + } } /** @@ -775,14 +817,14 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0370, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_METADATA_GENERATION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, INT_MAX); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); - OH_VideoProcessing_Destroy(videoProcessor); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, INT_MAX); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); } + OH_VideoProcessing_Destroy(videoProcessor); } /** @@ -794,7 +836,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0380, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -807,7 +853,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0390, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -820,7 +870,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0400, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Destroy(nullptr); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, 
VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -833,9 +887,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0410, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_Destroy(callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_Destroy(callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } } /** @@ -848,7 +904,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0420, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnError( nullptr, onErrorEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -861,10 +921,12 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0430, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnError( - callback, onErrorEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError( + callback, onErrorEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -878,7 +940,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0440, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnError( nullptr, onErrorCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -891,9 +957,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0450, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnError(callback, onErrorCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError(callback, onErrorCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -907,7 +975,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0460, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnState( nullptr, onStateEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, 
VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -920,9 +992,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0470, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnState(callback, onStateEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, onStateEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -935,7 +1009,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0480, TestSize.Level0) { OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnState(nullptr, onStateCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -948,9 +1026,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0490, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnState(callback, onStateCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, onStateCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -964,7 +1044,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0500, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer( nullptr, OnNewOutputBufferEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -977,9 +1061,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0510, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferEmptyCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -993,7 +1079,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0520, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_ErrorCode ret = 
OH_VideoProcessingCallback_BindOnNewOutputBuffer( nullptr, OnNewOutputBufferCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + } } /** @@ -1006,9 +1096,11 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0530, TestSize.Level0) OH_VideoProcessing_InitializeEnvironment(); VideoProcessing_Callback* callback = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferCallback); - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } OH_VideoProcessingCallback_Destroy(callback); } @@ -1023,11 +1115,13 @@ HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0540, TestSize.Level0) OH_VideoProcessing* videoProcessor = nullptr; VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); - if (!access("/system/lib64/", 0)) { - ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); - OH_VideoProcessing_Destroy(videoProcessor); - } else { - ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (OH_VideoProcessing_IsColorSpaceConversionSupported(&SRC_INFO, &DST_INFO)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } } } } \ No newline at end of file diff --git a/test/ndk/moduletest/video/capability_test.cpp b/test/ndk/moduletest/video/capability_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..592345f9f6454ddbba22d186908b02b4c4d88e9f --- /dev/null +++ b/test/ndk/moduletest/video/capability_test.cpp @@ -0,0 +1,897 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include "gtest/gtest.h" +#include "video_processing.h" +#include "native_buffer.h" + +using namespace std; +using namespace testing::ext; + +namespace { +class VpeVideoCapTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; + +void VpeVideoCapTest::SetUpTestCase() +{ + OH_VideoProcessing_InitializeEnvironment(); +} +void VpeVideoCapTest::TearDownTestCase() +{ + OH_VideoProcessing_DeinitializeEnvironment(); +} +void VpeVideoCapTest::SetUp() {} +void VpeVideoCapTest::TearDown() {} +} + + + +namespace { +/** + * @tc.number : COLORSPACE_SUPPORT_001 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_001, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_002 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_002, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_003 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_003, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + 
ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_004 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_004, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_RGB_565; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_005 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_005, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_006 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_006, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_007 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_007, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; 
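// --- Editor's illustrative sketch (not part of the original patch) ---
// Each COLORSPACE_SUPPORT_* case fills two VideoProcessing_ColorSpaceInfo structs
// (metadataType / colorSpace / pixelFormat) and asserts the boolean returned by
// OH_VideoProcessing_IsColorSpaceConversionSupported. A small local helper makes the
// repeated setup explicit; the lambda name and generic parameters are assumptions:
auto isConversionSupported = [](auto inMeta, auto inCs, auto inFmt,
                                auto outMeta, auto outCs, auto outFmt) {
    VideoProcessing_ColorSpaceInfo in{};
    in.metadataType = inMeta;
    in.colorSpace = inCs;
    in.pixelFormat = inFmt;
    VideoProcessing_ColorSpaceInfo out{};
    out.metadataType = outMeta;
    out.colorSpace = outCs;
    out.pixelFormat = outFmt;
    return OH_VideoProcessing_IsColorSpaceConversionSupported(&in, &out);
};
// e.g. the supported HDR10(PQ, NV12) -> HLG(NV12) case above reduces to:
//   ASSERT_EQ(true, isConversionSupported(OH_VIDEO_HDR_HDR10, OH_COLORSPACE_BT2020_PQ_LIMIT,
//       NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_VIDEO_HDR_HLG, OH_COLORSPACE_BT2020_HLG_LIMIT,
//       NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP));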
+ if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_008 + * @tc.name : HDR10 to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_008, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HDR10; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_009 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_009, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0010 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0010, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0011 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0011, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = 
OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HDR10; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0012 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0012, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0013 + * @tc.name : HDR vivid(PQ) to HLG + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0013, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P; + outputFormat.metadataType = OH_VIDEO_HDR_HLG; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0014 + * @tc.name : HDR vivid(HLG) to HDR vivid(PQ) + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0014, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0015 + * @tc.name : 
HDR vivid(HLG) to HDR vivid(PQ) + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0015, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_FULL; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0016 + * @tc.name : HDR vivid(HLG) to HDR vivid(PQ) + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0016, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0017 + * @tc.name : HDR vivid(HLG) to HDR vivid(PQ) + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0017, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0018 + * @tc.name : HLG to HDR10/HDR vivid + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0018, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HLG; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HDR10; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if 
(!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0019 + * @tc.name : HLG to HDR10/HDR vivid + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0019, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HLG; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0020 + * @tc.name : HLG to HDR10/HDR vivid + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0020, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HLG; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_VIVID; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0021 + * @tc.name : HLG to HDR10/HDR vivid + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0021, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_HLG; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HDR10; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0022 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0022, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = 
NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0023 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0023, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0024 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0024, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0025 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0025, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HDR10; + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0026 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0026, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + 
inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0027 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0027, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0028 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0028, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0029 + * @tc.name : HDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0029, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.metadataType = OH_VIDEO_HDR_VIVID; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.metadataType = OH_VIDEO_HDR_HDR10; + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0030 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0030, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + 
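// --- Editor's note (illustrative, not part of the original patch) ---
// The HDR2SDR and SDR2SDR cases deliberately leave metadataType unset on the SDR side.
// Since the structs are declared without an initializer, that field may be read with an
// indeterminate value by the support query; value-initialization would make the intent
// explicit. A hedged sketch (the variable name is hypothetical):
VideoProcessing_ColorSpaceInfo sdrInfo{};  // all fields zero-initialized
sdrInfo.colorSpace = OH_COLORSPACE_BT709_LIMIT;
sdrInfo.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP;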
VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0031 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0031, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT601_SMPTE_C_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0032 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0032, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_SMPTE_C_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(true, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0033 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0033, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_SMPTE_C_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0034 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0034, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + + inputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_FULL; + inputFormat.pixelFormat = 
NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0035 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0035, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.colorSpace = OH_COLORSPACE_BT601_EBU_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0036 + * @tc.name : SDR2SDR + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0036, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.colorSpace = OH_COLORSPACE_BT601_SMPTE_C_FULL; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.colorSpace = OH_COLORSPACE_BT709_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +/** + * @tc.number : COLORSPACE_SUPPORT_0037 + * @tc.name : 异常组合 + * @tc.desc : api test + */ +HWTEST_F(VpeVideoCapTest, COLORSPACE_SUPPORT_0037, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + VideoProcessing_ColorSpaceInfo outputFormat; + inputFormat.colorSpace = OH_COLORSPACE_BT2020_PQ_LIMIT; + inputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + outputFormat.colorSpace = OH_COLORSPACE_BT2020_HLG_LIMIT; + outputFormat.pixelFormat = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } else { + ASSERT_EQ(false, OH_VideoProcessing_IsColorSpaceConversionSupported(&inputFormat, &outputFormat)); + } + } +} +} +} // namespace \ No newline at end of file diff --git a/test/ndk/moduletest/video/enum_list.h b/test/ndk/moduletest/video/enum_list.h new file mode 100644 index 0000000000000000000000000000000000000000..ac7369352b98e8894eadb7c3ddd2a7f8e0e2b6e2 --- /dev/null +++ b/test/ndk/moduletest/video/enum_list.h @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ENUM_LIST_H
+#define ENUM_LIST_H
+#include "native_buffer.h"
+
+int32_t g_nativeBufferColorSpace[] = {
+    OH_COLORSPACE_NONE,
+    OH_COLORSPACE_BT601_EBU_FULL,
+    OH_COLORSPACE_BT601_SMPTE_C_FULL,
+    OH_COLORSPACE_BT709_FULL,
+    OH_COLORSPACE_BT2020_HLG_FULL,
+    OH_COLORSPACE_BT2020_PQ_FULL,
+    OH_COLORSPACE_BT601_EBU_LIMIT,
+    OH_COLORSPACE_BT601_SMPTE_C_LIMIT,
+    OH_COLORSPACE_BT709_LIMIT,
+    OH_COLORSPACE_BT2020_HLG_LIMIT,
+    OH_COLORSPACE_BT2020_PQ_LIMIT,
+    OH_COLORSPACE_SRGB_FULL,
+    OH_COLORSPACE_P3_FULL,
+    OH_COLORSPACE_P3_HLG_FULL,
+    OH_COLORSPACE_P3_PQ_FULL,
+    OH_COLORSPACE_ADOBERGB_FULL,
+    OH_COLORSPACE_SRGB_LIMIT,
+    OH_COLORSPACE_P3_LIMIT,
+    OH_COLORSPACE_P3_HLG_LIMIT,
+    OH_COLORSPACE_P3_PQ_LIMIT,
+    OH_COLORSPACE_ADOBERGB_LIMIT,
+    OH_COLORSPACE_LINEAR_SRGB,
+    OH_COLORSPACE_LINEAR_BT709,
+    OH_COLORSPACE_LINEAR_P3,
+    OH_COLORSPACE_LINEAR_BT2020,
+    OH_COLORSPACE_DISPLAY_SRGB,
+    OH_COLORSPACE_DISPLAY_P3_SRGB,
+    OH_COLORSPACE_DISPLAY_P3_HLG,
+    OH_COLORSPACE_DISPLAY_P3_PQ,
+    OH_COLORSPACE_DISPLAY_BT2020_SRGB,
+    OH_COLORSPACE_DISPLAY_BT2020_HLG,
+    OH_COLORSPACE_DISPLAY_BT2020_PQ
+};
+
+int32_t g_nativeBufferFormat[] = {
+    NATIVEBUFFER_PIXEL_FMT_CLUT8,
+    NATIVEBUFFER_PIXEL_FMT_CLUT1,
+    NATIVEBUFFER_PIXEL_FMT_CLUT4,
+    NATIVEBUFFER_PIXEL_FMT_RGB_565,
+    NATIVEBUFFER_PIXEL_FMT_RGBA_5658,
+    NATIVEBUFFER_PIXEL_FMT_RGBX_4444,
+    NATIVEBUFFER_PIXEL_FMT_RGBA_4444,
+    NATIVEBUFFER_PIXEL_FMT_RGB_444,
+    NATIVEBUFFER_PIXEL_FMT_RGBX_5551,
+    NATIVEBUFFER_PIXEL_FMT_RGBA_5551,
+    NATIVEBUFFER_PIXEL_FMT_RGB_555,
+    NATIVEBUFFER_PIXEL_FMT_RGBX_8888,
+    NATIVEBUFFER_PIXEL_FMT_RGBA_8888,
+    NATIVEBUFFER_PIXEL_FMT_RGB_888,
+    NATIVEBUFFER_PIXEL_FMT_BGR_565,
+    NATIVEBUFFER_PIXEL_FMT_BGRX_4444,
+    NATIVEBUFFER_PIXEL_FMT_BGRA_4444,
+    NATIVEBUFFER_PIXEL_FMT_BGRX_5551,
+    NATIVEBUFFER_PIXEL_FMT_BGRA_5551,
+    NATIVEBUFFER_PIXEL_FMT_BGRX_8888,
+    NATIVEBUFFER_PIXEL_FMT_BGRA_8888,
+    NATIVEBUFFER_PIXEL_FMT_YUV_422_I,
+    NATIVEBUFFER_PIXEL_FMT_YCBCR_422_SP,
+    NATIVEBUFFER_PIXEL_FMT_YCRCB_422_SP,
+    NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP,
+    NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP,
+    NATIVEBUFFER_PIXEL_FMT_YCBCR_422_P,
+    NATIVEBUFFER_PIXEL_FMT_YCRCB_422_P,
+    NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P,
+    NATIVEBUFFER_PIXEL_FMT_YCRCB_420_P,
+    NATIVEBUFFER_PIXEL_FMT_YUYV_422_PKG,
+    NATIVEBUFFER_PIXEL_FMT_UYVY_422_PKG,
+    NATIVEBUFFER_PIXEL_FMT_YVYU_422_PKG,
+    NATIVEBUFFER_PIXEL_FMT_VYUY_422_PKG,
+    NATIVEBUFFER_PIXEL_FMT_RGBA_1010102,
+    NATIVEBUFFER_PIXEL_FMT_YCBCR_P010,
+    NATIVEBUFFER_PIXEL_FMT_YCRCB_P010,
+    NATIVEBUFFER_PIXEL_FMT_RAW10,
+    NATIVEBUFFER_PIXEL_FMT_VENDER_MASK,
+    NATIVEBUFFER_PIXEL_FMT_BUTT
+};
+
+int32_t g_nativeBufferMetadataType[] = {
+    OH_VIDEO_HDR_HLG,
+    OH_VIDEO_HDR_HDR10,
+    OH_VIDEO_HDR_VIVID
+};
+
+#endif
\ No newline at end of file
diff --git a/test/ndk/moduletest/video/func_test.cpp b/test/ndk/moduletest/video/func_test.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..5f936a14744aec8bb02cea175f92a3ed4dfd3d40
--- /dev/null
+++ b/test/ndk/moduletest/video/func_test.cpp
@@ -0,0 +1,1283 @@
+/*
+ * Copyright (C) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include "gtest/gtest.h" +#include "video_processing.h" +#include "yuv_viewer.h" + +#include "video_sample.h" +using namespace std; +using namespace OHOS; +using namespace testing::ext; +namespace { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; + + +class VpeVideoFuncTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; + +void VpeVideoFuncTest::SetUpTestCase() +{ + OH_VideoProcessing_InitializeEnvironment(); +} +void VpeVideoFuncTest::TearDownTestCase() +{ + OH_VideoProcessing_DeinitializeEnvironment(); +} +void VpeVideoFuncTest::SetUp() +{ +} +void VpeVideoFuncTest::TearDown() +{ +} +} + +namespace { +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0010 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit NV12,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0010, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv12.yuv"; + sample->inputMetaPath = "/data/test/media/vivid_pq.bin"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0020 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit NV12,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0020, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv12.yuv"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0030 + * @tc.name : test HDRVivid2SDR ,src colorspace 
PQ@10bit NV21,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0030, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv21.yuv"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0040 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit NV21,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0040, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv21.yuv"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0050 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit RGBA,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0050, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_rgba.yuv"; + sample->isHDRVivid = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0060 + * @tc.name : test HDRVivid2SDR ,src colorspace PQ@10bit RGBA,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0060, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_pq_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + 
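// Start processing, then wait for the sample to stop and check that the conversion reported success. + 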
sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0070 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit NV12,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0070, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0080 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit NV12,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0080, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0090 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit NV21,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0090, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0100 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit NV21,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0100, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = 
sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0110 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit RGBA,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0110, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0120 + * @tc.name : test HDRVivid2SDR ,src colorspace HLG@10bit RGBA,convert to BT709@RGBA8888 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0120, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->isHDRVivid = true; + sample->inputFilePath = "/data/test/media/vivid_hlg_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0130 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV12,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0130, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0140 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV12,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0140, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, 
OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0150 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV12,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0150, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0160 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV21,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0160, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0170 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV21,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0170, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0180 + * @tc.name : test SDR2SDR ,src colorspace EBU@NV21,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0180, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_nv21.yuv"; + VideoProcessParam param = 
{NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0190 + * @tc.name : test SDR2SDR ,src colorspace EBU@RGBA,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0190, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0200 + * @tc.name : test SDR2SDR ,src colorspace EBU@RGBA,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0200, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0210 + * @tc.name : test SDR2SDR ,src colorspace EBU@RGBA,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0210, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/ebu_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_EBU_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0220 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV12,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0220, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv12.yuv"; + VideoProcessParam param = 
{NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0230 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV12,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0230, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0240 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV12,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0240, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0250 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV21,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0250, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0260 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV21,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0260, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = 
"/data/test/media/smptec_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0270 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@NV21,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0270, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0280 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@RGBA,convert to BT709@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0280, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0290 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@RGBA,convert to BT709@NV21 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0290, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0300 + * @tc.name : test SDR2SDR ,src colorspace SMPTEC@RGBA,convert to BT709@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0300, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = 
std::make_unique(); + sample->inputFilePath = "/data/test/media/smptec_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, OH_COLORSPACE_BT709_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0310 + * @tc.name : test HDR2HDR ,src colorspace PQ@NV12,convert to HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0310, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0320 + * @tc.name : test HDR2HDR ,src colorspace PQ@NV12,convert to HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0320, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0330 + * @tc.name : test HDR2HDR ,src colorspace PQ@NV21,convert to HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0330, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0340 + * @tc.name : test HDR2HDR ,src colorspace PQ@NV21,convert to HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0340, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = 
std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0350 + * @tc.name : test HDR2HDR ,src colorspace PQ@RGBA,convert to HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0350, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0360 + * @tc.name : test HDR2HDR ,src colorspace PQ@RGBA,convert to HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0360, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/pq_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0370 + * @tc.name : test HDR2HDR ,src colorspace HLG@NV12,convert to PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0370, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0380 + * @tc.name : test HDR2HDR ,src colorspace HLG@NV12,convert to PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0380, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample 
= std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_nv12.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0390 + * @tc.name : test HDR2HDR ,src colorspace HLG@NV21,convert to PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0390, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0400 + * @tc.name : test HDR2HDR ,src colorspace HLG@NV21,convert to PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0400, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_nv21.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0410 + * @tc.name : test HDR2HDR ,src colorspace HLG@RGBA,convert to PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0410, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0420 + * @tc.name : test HDR2HDR ,src colorspace HLG@RGBA,convert to PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0420, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr 
sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/hlg_rgba.yuv"; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0430 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@NV12,convert to Vivid HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0430, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv12.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0440 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@NV12,convert to Vivid HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0440, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv12.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0450 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@NV21,convert to Vivid HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0450, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv21.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : 
VPE_VIDEO_FUNC_TEST_0460 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@NV21,convert to Vivid HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0460, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_nv21.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0470 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@RGBA,convert to Vivid HLG@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0470, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_rgba.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0480 + * @tc.name : test HDR2HDR ,src colorspace Vivid PQ@RGBA,convert to Vivid HLG@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0480, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_pq_rgba.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0490 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@NV12,convert to Vivid PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0490, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_nv12.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = 
sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0500 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@NV12,convert to Vivid PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0500, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_nv12.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0510 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@NV21,convert to Vivid PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0510, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_nv21.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0520 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@NV21,convert to Vivid PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0520, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_nv21.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0530 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@RGBA,convert to Vivid PQ@NV12 + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0530, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = 
std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_rgba.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} + +/** + * @tc.number : VPE_VIDEO_FUNC_TEST_0540 + * @tc.name : test HDR2HDR ,src colorspace Vivid HLG@RGBA,convert to Vivid PQ@RGBA + * @tc.desc : function test + */ +HWTEST_F(VpeVideoFuncTest, VPE_VIDEO_FUNC_TEST_0540, TestSize.Level0) +{ + if (!access("/system/lib64/", 0)) { + std::unique_ptr sample = std::make_unique(); + sample->inputFilePath = "/data/test/media/vivid_hlg_rgba.yuv"; + sample->isHDRVivid = true; + sample->isHDRVividOut = true; + VideoProcessParam param = {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, OH_COLORSPACE_BT2020_PQ_LIMIT}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + ASSERT_EQ(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } +} +} \ No newline at end of file diff --git a/test/ndk/moduletest/video/reli_test.cpp b/test/ndk/moduletest/video/reli_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..44eb89c094125eadb002dd7abd699d151e98429b --- /dev/null +++ b/test/ndk/moduletest/video/reli_test.cpp @@ -0,0 +1,253 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include "gtest/gtest.h" +#include "video_processing.h" +#include "yuv_viewer.h" +#include "enum_list.h" +#include "video_sample.h" +using namespace std; +using namespace OHOS; +using namespace testing::ext; + +namespace { +constexpr uint32_t DEFAULT_WIDTH = 3840; +constexpr uint32_t DEFAULT_HEIGHT = 2160; + + +class VpeVideoReliTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; + +void VpeVideoReliTest::SetUpTestCase() +{ + OH_VideoProcessing_InitializeEnvironment(); +} +void VpeVideoReliTest::TearDownTestCase() +{ + OH_VideoProcessing_DeinitializeEnvironment(); +} +void VpeVideoReliTest::SetUp() +{ +} +void VpeVideoReliTest::TearDown() +{ +} +} + +namespace { +int32_t TestUnsupportedOutput(int32_t inColorSpace, int32_t inPixFmt) +{ + for (int i : g_nativeBufferColorSpace) { + for (int j : g_nativeBufferFormat) { + for (int k : g_nativeBufferMetadataType) { + std::unique_ptr sample = std::make_unique(); + sample->inputFrameNumber = 1; + VideoProcessParam param = {inPixFmt, inColorSpace, j, i}; + int32_t ret = sample->InitVideoSample(VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, + DEFAULT_WIDTH, DEFAULT_HEIGHT, param); + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sample->StartProcess(); + EXPECT_NE(sample->WaitAndStopSample(), VIDEO_PROCESSING_SUCCESS); + } + } + } + } +} + +/** + * @tc.number : VPE_VIDEO_RELI_TEST_0010 + * @tc.name : test all unsupported convert options + * @tc.desc : function test + */ +HWTEST(VpeVideoReliTest, VPE_VIDEO_RELI_TEST_0010, TestSize.Level0) +{ + for (int i : g_nativeBufferColorSpace) { + for (int j : g_nativeBufferFormat) { + TestUnsupportedOutput(i, j); + } + } +} + +/** + * @tc.number : METADATASUPPORT_001 + * @tc.name : test all unsupported metadata generation + * @tc.desc : function test + */ +HWTEST_F(VpeVideoReliTest, METADATASUPPORT_001, TestSize.Level2) +{ + VideoProcessing_ColorSpaceInfo inputFormat; + for (int i : g_nativeBufferMetadataType) { + for (int j: g_nativeBufferColorSpace) { + for (int k : g_nativeBufferFormat) { + inputFormat.metadataType = i; + inputFormat.colorSpace = j; + inputFormat.pixelFormat = k; + bool ret = OH_VideoProcessing_IsMetadataGenerationSupported(inputFormat); + } + } + } +} + + +void CheckCapability(VideoProcessing_ColorSpaceInfo inputFormat) +{ + if(formatImage.colorSpace == OH_COLORSPACE_SRGB_FULL || + formatImage.colorSpace == OH_COLORSPACE_SRGB_LIMIT || + formatImage.colorSpace == OH_COLORSPACE_LINEAR_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_SRGB || + formatImage.colorSpace == OH_COLORSPACE_DISPLAY_BT2020_SRGB) { + if(formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP || + formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888){ + if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) { + if (!access("/system/lib64/media/", 0)) { + ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } else { + ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(formatImage)); + } + } + } + } + if(formatImage.colorSpace == 
    if (formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_SRGB ||
        formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_HLG ||
        formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_PQ) {
        if (formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP ||
            formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP ||
            formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888) {
            if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
                if (!access("/system/lib64/media/", 0)) {
                    ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                } else {
                    ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                }
            }
        }
    }
    if (formatImage.colorSpace == OH_COLORSPACE_ADOBERGB_FULL ||
        formatImage.colorSpace == OH_COLORSPACE_ADOBERGB_LIMIT) {
        if (formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP ||
            formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP ||
            formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888) {
            if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
                if (!access("/system/lib64/media/", 0)) {
                    ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                } else {
                    ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                }
            }
        }
    }
}

HWTEST_F(VpeVideoReliTest, METADATASUPPORT_002, TestSize.Level2)
{
    ImageProcessing_ColorSpaceInfo formatImage;
    for (int i : g_nativeBufferMetadataType) {
        for (int j : g_nativeBufferColorSpace) {
            for (int k : g_nativeBufferFormat) {
                formatImage.metadataType = i;
                formatImage.colorSpace = j;
                formatImage.pixelFormat = k;
                CheckCapability(formatImage);
            }
        }
    }
}

bool ValidatePixelFormat(ImageProcessing_ColorSpaceInfo formatImage)
{
    bool ret = (formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP ||
        formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP ||
        formatImage.pixelFormat == NATIVEBUFFER_PIXEL_FMT_RGBA_8888);
    return ret;
}

HWTEST_F(VpeVideoReliTest, METADATASUPPORT_003, TestSize.Level2)
{
    ImageProcessing_ColorSpaceInfo formatImage;
    for (int i : g_nativeBufferMetadataType) {
        for (int j : g_nativeBufferColorSpace) {
            for (int k : g_nativeBufferFormat) {
                formatImage.metadataType = i;
                formatImage.colorSpace = j;
                formatImage.pixelFormat = k;
                cout << "--metadataType--" << i << "--colorSpace--" << j << "--pixelFormat--" << k << endl;
            }
        }
        if (formatImage.colorSpace == OH_COLORSPACE_SRGB_FULL ||
            formatImage.colorSpace == OH_COLORSPACE_SRGB_LIMIT ||
            formatImage.colorSpace == OH_COLORSPACE_LINEAR_SRGB ||
            formatImage.colorSpace == OH_COLORSPACE_DISPLAY_SRGB ||
            formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_SRGB ||
            formatImage.colorSpace == OH_COLORSPACE_DISPLAY_BT2020_SRGB) {
            if (ValidatePixelFormat(formatImage)) {
                if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
                    if (!access("/system/lib64/media/", 0)) {
                        cout << "return true" << endl;
                        ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                    } else {
                        cout << "return false" << endl;
                        ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                    }
                }
            }
        } else if (formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_SRGB ||
            formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_HLG ||
            formatImage.colorSpace == OH_COLORSPACE_DISPLAY_P3_PQ) {
            if (ValidatePixelFormat(formatImage)) {
                if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
                    if (!access("/system/lib64/media/", 0)) {
                        cout << "return true" << endl;
                        ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                    } else {
                        cout << "return false" << endl;
                        ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                    }
                }
            }
        } else if (formatImage.colorSpace == OH_COLORSPACE_ADOBERGB_FULL ||
            formatImage.colorSpace == OH_COLORSPACE_ADOBERGB_LIMIT) {
            if (ValidatePixelFormat(formatImage)) {
                if (!access("/system/lib64/ndk/libvideo_processing_capi_impl.so", 0)) {
                    if (!access("/system/lib64/media/", 0)) {
                        ASSERT_EQ(true, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                    } else {
                        cout << "return false" << endl;
                        ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
                    }
                }
            }
        } else {
            cout << "return false" << endl;
            ASSERT_EQ(false, OH_ImageProcessing_IsMetadataGenerationSupported(&formatImage));
        }
    }
}
}
\ No newline at end of file
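The three color-space branches in CheckCapability and METADATASUPPORT_003 above differ only in which group of color spaces they match against the same three pixel formats. A table-driven variant, sketched below, can make the expected capability matrix easier to read. The helper name IsMetadataGenCandidate is hypothetical; the sketch assumes the NDK enum constants already pulled in by reli_test.cpp and only adds <algorithm> and <vector>.

#include <algorithm>
#include <cstdint>
#include <vector>

// Sketch only: membership test for the color-space/pixel-format pairs that the
// capability checks above expect to be supported when the algorithm library exists.
static bool IsMetadataGenCandidate(const ImageProcessing_ColorSpaceInfo &info)
{
    static const std::vector<int32_t> kColorSpaces = {
        OH_COLORSPACE_SRGB_FULL, OH_COLORSPACE_SRGB_LIMIT, OH_COLORSPACE_LINEAR_SRGB,
        OH_COLORSPACE_DISPLAY_SRGB, OH_COLORSPACE_DISPLAY_P3_SRGB, OH_COLORSPACE_DISPLAY_BT2020_SRGB,
        OH_COLORSPACE_DISPLAY_P3_HLG, OH_COLORSPACE_DISPLAY_P3_PQ,
        OH_COLORSPACE_ADOBERGB_FULL, OH_COLORSPACE_ADOBERGB_LIMIT,
    };
    static const std::vector<int32_t> kPixelFormats = {
        NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP,
        NATIVEBUFFER_PIXEL_FMT_RGBA_8888,
    };
    return std::count(kColorSpaces.begin(), kColorSpaces.end(), info.colorSpace) > 0 &&
        std::count(kPixelFormats.begin(), kPixelFormats.end(), info.pixelFormat) > 0;
}

// The branches above then collapse to a single expectation, e.g.:
//   bool expected = IsMetadataGenCandidate(fmt) && !access("/system/lib64/media/", 0);
//   ASSERT_EQ(expected, OH_ImageProcessing_IsMetadataGenerationSupported(&fmt));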
diff --git a/test/ndk/moduletest/video/video_sample.cpp b/test/ndk/moduletest/video/video_sample.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..205e783c4ddc65fe35568764bad72234ba0ab64b
--- /dev/null
+++ b/test/ndk/moduletest/video/video_sample.cpp
@@ -0,0 +1,254 @@
/*
 * Copyright (C) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <chrono>
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <string_view>
#include "video_sample.h"
using namespace OHOS;
using namespace OHOS::Media;
using namespace std;
static uint32_t g_onErrorCount = 0;
static VideoProcessing_State g_state = VIDEO_PROCESSING_STATE_STOPPED;
static std::mutex g_Mutex;
static std::condition_variable g_Cond;
constexpr std::chrono::seconds STOP_TIMEOUT(10);
constexpr int64_t NANOS_IN_SECOND = 1000000000L;
constexpr int64_t NANOS_IN_MICRO = 1000L;

class TestConsumerListener : public IBufferConsumerListener {
public:
    TestConsumerListener(sptr<Surface> cs, std::string_view name) : cs(cs) {};
    ~TestConsumerListener() {}
    void OnBufferAvailable() override
    {
        sptr<SurfaceBuffer> buffer;
        int32_t flushFence;
        cs->AcquireBuffer(buffer, flushFence, timestamp, damage);

        cs->ReleaseBuffer(buffer, -1);
    }

private:
    int64_t timestamp = 0;
    Rect damage = {};
    sptr<Surface> cs {nullptr};
};

static int64_t GetSystemTimeUs()
{
    struct timespec now;
    (void)clock_gettime(CLOCK_BOOTTIME, &now);
    int64_t nanoTime = static_cast<int64_t>(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec;
    return nanoTime / NANOS_IN_MICRO;
}

static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData)
{
    g_onErrorCount++;
    std::cout << "OnError callback recv errorcode:" << error << std::endl;
}

static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData)
{
    g_state = state;
    if (state == VIDEO_PROCESSING_STATE_STOPPED) {
        g_Cond.notify_all();
    }
    std::cout << "OnState callback called, new state is " << state << std::endl;
}

static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData)
{
    VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index);
    if (ret != VIDEO_PROCESSING_SUCCESS) {
        g_onErrorCount++;
        std::cout << "Render output buffer failed, errcode: " << ret << std::endl;
    }
}

VideoSample::~VideoSample()
{
    if (inFile != nullptr) {
        if (inFile->is_open()) {
            inFile->close();
        }
        inFile.reset();
        inFile = nullptr;
    }
    if (callback) {
        OH_VideoProcessingCallback_Destroy(callback);
        callback = nullptr;
    }
    if (rect) {
        delete rect;
        rect = nullptr;
    }
    if (metaData) {
        delete[] metaData;
        metaData = nullptr;
    }

    OH_VideoProcessing_Destroy(videoProcessor);
}

int32_t VideoSample::InitVideoSample(const int32_t type, int32_t width, int32_t height, VideoProcessParam param)
{
    width_ = width;
    height_ = height;
    param_ = param;
    viewer = std::make_unique<YuvViewer>(); // element type assumed from yuv_viewer.h
    int32_t ret = OH_VideoProcessing_Create(&videoProcessor, type);
    CheckAndRet(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Create failed.");

    cs = Surface::CreateSurfaceAsConsumer();
    sptr<IBufferConsumerListener> listener = new TestConsumerListener(cs, OUT_DIR);
    cs->RegisterConsumerListener(listener);
    auto p = cs->GetProducer();
    ps = Surface::CreateSurfaceAsProducer(p);
    outWindow = CreateNativeWindowFromSurface(&ps);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_BUFFER_GEOMETRY, width_, height_);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_USAGE,
        NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE |
        NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt);

    ret = OH_VideoProcessing_SetSurface(videoProcessor, outWindow);
    CheckAndRet(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_SetSurface failed.");
    ret = OH_VideoProcessing_GetSurface(videoProcessor, &inWindow);
    CheckAndRet(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_GetSurface failed.");
    SetInputWindowParam();
    ret = OH_VideoProcessingCallback_Create(&callback);
    CheckAndRet(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessingCallback_Create failed.");
    OH_VideoProcessingCallback_BindOnError(callback, OnError);
    OH_VideoProcessingCallback_BindOnState(callback, OnState);
    OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer);
    ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this);
    CheckAndRet(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_RegisterCallback failed.");
    return VIDEO_PROCESSING_SUCCESS;
}

void VideoSample::SetInputWindowParam()
{
    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_);
    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_USAGE,
        NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE |
        NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER);
    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt);

    if (isHDRVivid) {
        metaDataFile = std::make_unique<std::ifstream>(inputMetaPath);
        metaDataFile->seekg(0, ios::end);
        metadataSize = metaDataFile->tellg();
        metaDataFile->seekg(0, ios::beg);
        metaData = new uint8_t[metadataSize];
        metaDataFile->read(reinterpret_cast<char *>(metaData), metadataSize);
    }
    rect = new Region::Rect();
    rect->x = 0;
    rect->y = 0;
    rect->w = width_;
    rect->h = height_;
    region.rects = rect;
}

int32_t VideoSample::InputFunc()
{
    inFile = std::make_unique<std::ifstream>(inputFilePath);
    for (int32_t i = 0; i < inputFrameNumber; i++) {
        int fenceFd = -1;
        OHNativeWindowBuffer *ohNativeWindowBuffer;
        int32_t err = OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd);
        CheckAndRet(err == 0, err, "OH_NativeWindow_NativeWindowRequestBuffer failed.");
        if (fenceFd > 0) {
            close(fenceFd);
        }
        OH_NativeBuffer *nativeBuffer = nullptr;
        err = OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer);
        CheckAndRet(err == 0, err, "OH_NativeBuffer_FromNativeWindowBuffer failed.");
        void *virAddr = nullptr;
        OH_NativeBuffer_Config config;
        OH_NativeBuffer_GetConfig(nativeBuffer, &config);
        err = OH_NativeBuffer_Map(nativeBuffer, &virAddr);
        CheckAndRet(err == 0, err, "OH_NativeBuffer_Map failed.");
        if (inFile->is_open()) {
            if (param_.inFmt == NATIVEBUFFER_PIXEL_FMT_YCBCR_P010 ||
                param_.inFmt == NATIVEBUFFER_PIXEL_FMT_YCRCB_P010) {
                ReadOneFrameP010(reinterpret_cast<uint8_t *>(virAddr), config);
            } else if (param_.inFmt == NATIVEBUFFER_PIXEL_FMT_RGBA_1010102) {
                ReadOneFrameRGBA10(reinterpret_cast<uint8_t *>(virAddr), config);
            }
            inFile->seekg(0, ios::beg);
        }
        NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs());
        err = OH_NativeBuffer_Unmap(nativeBuffer);
        CheckAndRet(err == 0, err, "OH_NativeBuffer_Unmap failed.");
        err = OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region);
        CheckAndRet(err == 0, err, "OH_NativeWindow_NativeWindowFlushBuffer failed.");
        err = OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace);
        CheckAndRet(err == 0, err, "OH_NativeWindow_SetColorSpace failed.");
        if (isHDRVivid) {
            uint8_t val = OH_VIDEO_HDR_VIVID;
            err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val);
            CheckAndRet(err == 0, err, "set OH_HDR_METADATA_TYPE failed.");
            err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_DYNAMIC_METADATA, metadataSize, metaData);
            CheckAndRet(err == 0, err, "set OH_HDR_DYNAMIC_METADATA failed.");
        }
    }
    return 0;
}

int32_t VideoSample::StartProcess()
{
    int32_t ret = OH_VideoProcessing_Start(videoProcessor);
    CheckAndRet(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Start failed.");
    inputLoop_ = make_unique<thread>(&VideoSample::InputFunc, this);
    return VIDEO_PROCESSING_SUCCESS;
}

int32_t VideoSample::WaitAndStopSample()
{
    inputLoop_->join();
    int32_t ret = OH_VideoProcessing_Stop(videoProcessor);
    CheckAndRet(ret == VIDEO_PROCESSING_SUCCESS, ret, "OH_VideoProcessing_Stop failed.");
    unique_lock<mutex> lock(g_Mutex);
    if (g_Cond.wait_for(lock, STOP_TIMEOUT) == std::cv_status::timeout) {
        std::cout << "waiting stop state timeout" << std::endl;
    }
    return g_onErrorCount;
}

int32_t VideoSample::ReadOneFrameP010(uint8_t *addr, OH_NativeBuffer_Config config)
{
    uint8_t *start = addr;
    // copy Y plane: height rows of width_ 16-bit samples, advancing by the buffer stride
    for (uint32_t i = 0; i < config.height; i++) {
        inFile->read(reinterpret_cast<char *>(addr), width_ * sizeof(uint16_t));
        addr += config.stride;
    }
    // copy interleaved UV plane: height / 2 rows for 4:2:0 subsampling
    for (uint32_t i = 0; i < config.height / sizeof(uint16_t); i++) {
        inFile->read(reinterpret_cast<char *>(addr), width_ * sizeof(uint16_t));
        addr += config.stride;
    }
    return addr - start;
}

int32_t VideoSample::ReadOneFrameRGBA10(uint8_t *addr, OH_NativeBuffer_Config config)
{
    uint8_t *start = addr;
    // copy height_ rows of width_ RGBA1010102 pixels (4 bytes each), advancing by the buffer stride
    for (uint32_t i = 0; i < height_; i++) {
        inFile->read(reinterpret_cast<char *>(addr), width_ * sizeof(uint32_t));
        addr += config.stride;
    }
    return addr - start;
}
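One note on WaitAndStopSample above: OnState signals g_Cond when the STOPPED state arrives, but the plain wait_for call only distinguishes a timeout, so a notification that fires before the waiter takes the lock, or a spurious wakeup, looks the same as a real state change. A predicate-based wait, sketched below against the same globals defined in video_sample.cpp, is one way to make the wait robust; it is an illustration, not part of the patch.

// Sketch only: predicate-based wait using g_Mutex, g_Cond, g_state and STOP_TIMEOUT
// from video_sample.cpp. Assumes g_state was set to a non-STOPPED value when
// processing started; returns false if the STOPPED state never arrives in time.
static bool WaitForStoppedState()
{
    std::unique_lock<std::mutex> lock(g_Mutex);
    return g_Cond.wait_for(lock, STOP_TIMEOUT,
        [] { return g_state == VIDEO_PROCESSING_STATE_STOPPED; });
}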
diff --git a/test/ndk/moduletest/video/video_sample.h b/test/ndk/moduletest/video/video_sample.h
new file mode 100644
index 0000000000000000000000000000000000000000..c37be5fe675ba5f2dd8559f59ee79c1c7c1f74b4
--- /dev/null
+++ b/test/ndk/moduletest/video/video_sample.h
@@ -0,0 +1,80 @@
/*
 * Copyright (C) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef VIDEO_SAMPLE_H
#define VIDEO_SAMPLE_H
#include <fstream>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include "surface.h"
#include "yuv_viewer.h"
#include "video_processing.h"
#include "surface/window.h"
#include "native_buffer.h"

// Early-return helper: logs the failure and returns `ret` from the calling function
// when `cond` does not hold. A macro (rather than an inline function) is used so the
// `return` takes effect in the caller.
#define CheckAndRet(cond, ret, msg)                                      \
    do {                                                                 \
        if (!(cond)) {                                                   \
            std::cout << (msg) << " ErrCode:" << (ret) << std::endl;     \
            return (ret);                                                \
        }                                                                \
    } while (0)

typedef struct VideoProcessParam {
    OH_NativeBuffer_Format inFmt;
    OH_NativeBuffer_ColorSpace inColorSpace;
    OH_NativeBuffer_Format outFmt;
    OH_NativeBuffer_ColorSpace outColorSpace;
} VideoProcessParam;

namespace OHOS {
class VideoSample {
public:
    VideoSample() = default;
    ~VideoSample();

    int32_t inputFrameNumber = 100;
    std::string inputFilePath = "";
    std::string inputMetaPath = "";
    bool isHDRVivid = false;
    bool isHDRVividOut = false;
    int32_t InputFunc();
    int32_t InitVideoSample(const int32_t type, int32_t width, int32_t height, VideoProcessParam param);
    int32_t StartProcess();
    int32_t WaitAndStopSample();
    int32_t errCount = 0;
    std::string OUT_DIR = "";
private:
    void SetInputWindowParam();

    int32_t width_ = 0;
    int32_t height_ = 0;
    VideoProcessParam param_;
    int32_t ReadOneFrameP010(uint8_t *addr, OH_NativeBuffer_Config config);
    int32_t ReadOneFrameRGBA10(uint8_t *addr, OH_NativeBuffer_Config config);
    OH_VideoProcessing* videoProcessor = nullptr;
    std::unique_ptr<YuvViewer> viewer; // element type assumed from yuv_viewer.h
    sptr<Surface> cs = nullptr;        // consumer surface created in InitVideoSample
    sptr<Surface> ps = nullptr;        // producer surface wrapped into outWindow
    OHNativeWindow *inWindow = nullptr;
    OHNativeWindow *outWindow = nullptr;
    std::unique_ptr<std::ifstream> inFile;
    std::unique_ptr<std::ifstream> metaDataFile;
    std::unique_ptr<std::thread> inputLoop_;
    VideoProcessing_Callback* callback = nullptr;
    struct Region region;
    struct Region::Rect *rect = nullptr;
    uint8_t *metaData = nullptr;
    int32_t metadataSize = 0;
};
}
#endif
\ No newline at end of file
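For reference, the copy loops in ReadOneFrameP010 and ReadOneFrameRGBA10 assume tightly packed rows of width_ samples in the input file while advancing the destination by the driver-reported stride, which may be larger than the row size. The generic sketch below spells out that stride handling; CopyPlane is a hypothetical helper used only for illustration and is not part of the patch.

#include <cstdint>
#include <cstring>

// Sketch only: copy `rows` rows of `rowBytes` tightly packed source bytes into a
// mapped buffer whose rows are `stride` bytes apart, as ReadOneFrameP010 and
// ReadOneFrameRGBA10 do when filling a requested native window buffer.
static uint8_t *CopyPlane(uint8_t *dst, const uint8_t *src, uint32_t rows, uint32_t rowBytes, uint32_t stride)
{
    for (uint32_t i = 0; i < rows; i++) {
        std::memcpy(dst, src, rowBytes);
        src += rowBytes; // source frame data is tightly packed
        dst += stride;   // destination rows are stride-aligned
    }
    return dst;
}

// For a width x height P010 frame: the Y plane is `height` rows of `width * 2` bytes,
// followed by `height / 2` rows of interleaved UV, also `width * 2` bytes each.
// For RGBA1010102: `height` rows of `width * 4` bytes.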