diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..cf007af3df1352a594c13700cac7b4106c12efc0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,152 @@ +## c/c++ +# Prerequisites +*.d + +# Compiled Object files +*.ko +*.slo +*.lo +*.o +*.obj +*.elf + +# Linker output +*.ilk +*.map +*.exp + +# Precompiled Headers +*.gch +*.pch + +# Compiled Dynamic libraries +*.so +*.so.* +*.dylib +*.dll + +# module files +*.mod +*.smod +*.cmd +.tmp_versions/ +modules.order +Module.symvers +Mkfile.old +dkms.conf + +# Compiled Static libraries +*.lai +*.la +*.a +*.lib +*.lo + +# Executables +*.exe +*.out +*.app +*.i*86 +*.x86_64 +*.hex + +# Debug files +*.dSYM/ +*.su +*.idb +*.pdb + +## cmake +CMakeLists.txt.user +CMakeCache.txt +CMakeFiles +CMakeScripts +Testing +Makefile +cmake_install.cmake +install_manifest.txt +compile_commands.json +CTestTestfile.cmake +_deps + +## ninja +.ninja_deps +.ninja_log + +## vscode +.vscode +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +## jetbrains +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser +.clang-format diff --git a/BUILD.gn b/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..286962c9913af8889dc38cd4e77917e0a3ff845e --- /dev/null +++ b/BUILD.gn @@ -0,0 +1,22 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/config/ohos/config.gni") +import("//build/ohos.gni") + +group("video_processing_engine_packages") { + public_deps = [ + "framework:videoprocessingengine", + "services:videoprocessingservice" + ] +} diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f433b1a53f5b830a205fd2df78e2b34974656c7b --- /dev/null +++ b/LICENSE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS diff --git a/README.md b/README.md index 29b25708a7d6c42db9b13123c7e2b5ccc4cb9696..a96830dc234d08fd26bc7698238d6fb1f736dcc9 100644 --- a/README.md +++ b/README.md @@ -1,37 +1,68 @@ -# multimedia_video_processing_engine +# VPE引擎 -#### 介绍 -针对视频、图片以及渲染的内容的HDR、清晰和流畅低功耗的用户体验,系统层提供统一的公共基础能力,实现简单、一致、高效的视频处理服务。 +## 简介 +VPE(Video Processing Engine)引擎是处理视频和图像数据的媒体引擎,包括细节增强、对比度增强、亮度增强、动态范围增强等基础能力,为转码、分享、显示后处理等提供色彩空间转换、缩放超分、动态元数据集生成等基础算法。 -#### 软件架构 -软件架构说明 +VPE引擎的主要结构如下图所示: +![VPE引擎架构图](./figures/videoProcessingEngine_architecture.png) -#### 安装教程 +## 目录 -1. xxxx -2. xxxx -3. xxxx +仓目录结构如下: -#### 使用说明 +``` +/foundation/multimedia/video_processing_engine/ +├── framework # 框架代码 +│ ├── algorithm # 算法框架 +│ ├── aihdr_enhancer # 图像HDR增强算法框架 +│ ├── aihdr_enhancer_video # 视频HDR增强算法框架 +│ ├── colorspace_converter # 图像颜色空间转换算法框架 +│ ├── colorspace_converter_display # 图像颜色空间显示算法框架 +│ ├── colorspace_converter_video # 视频颜色空间转换算法框架 +│ ├── detail_enhancer # 图像细节增强算法框架 +│ ├── detail_enhancer_video # 视频细节增强算法框架 +│ ├── extension_manager # 插件管理 +│ ├── metadata_generator # 图像元数据生成算法框架 +│ ├── metadata_generator_video # 视频元数据生成算法框架 +│ ├── video_variable_refresh_rate # 视频可变帧率算法框架 +│ ├── capi # NDK层 +│ ├── image_processing # 图像NDK +│ ├── video_processing # 视频NDK +│ ├── dfx # dfx代码 +├── interfaces # 接口层 +│ ├── inner_api # 系统内部接口 +│ ├── kits # 应用接口 +├── services # 服务代码 +├── sertestvices # 测试代码 +``` -1. xxxx -2. xxxx -3. xxxx +## 编译构建 -#### 参与贡献 +编译32位ARM系统VPE引擎 +``` +./build.sh --product-name {product_name} --ccache --build-target video_processing_engine +``` -1. Fork 本仓库 -2. 新建 Feat_xxx 分支 -3. 提交代码 -4. 新建 Pull Request +编译64位ARM系统VPE引擎 +``` +./build.sh --product-name {product_name} --ccache --target-cpu arm64 --build-target video_processing_engine +``` +{product_name}为当前支持的平台,比如rk3568。 -#### 特技 +## 说明 -1. 使用 Readme\_XXX.md 来支持不同的语言,例如 Readme\_en.md, Readme\_zh.md -2. Gitee 官方博客 [blog.gitee.com](https://blog.gitee.com) -3. 你可以 [https://gitee.com/explore](https://gitee.com/explore) 这个地址来了解 Gitee 上的优秀开源项目 -4. [GVP](https://gitee.com/gvp) 全称是 Gitee 最有价值开源项目,是综合评定出的优秀开源项目 -5. Gitee 官方提供的使用手册 [https://gitee.com/help](https://gitee.com/help) -6. Gitee 封面人物是一档用来展示 Gitee 会员风采的栏目 [https://gitee.com/gitee-stars/](https://gitee.com/gitee-stars/) +### 使用说明 +VPE引擎作为OpenHarmony的组件,提供系统的视频图像能力,包含色彩空间转换、动态元数据生成以及细节增强等能力,供开发者进行图像和视频处理操作。 + +## 相关仓 + +- [graphic_graphic_2d](https://gitee.com/openharmony/graphic_graphic_2d) +- [graphic_graphic_surface](https://gitee.com/openharmony/graphic_graphic_surface) +- [multimedia_image_framework](https://gitee.com/openharmony/multimedia_image_framework) +- [multimedia_media_foundation](https://gitee.com/openharmony/multimedia_media_foundation) +- [third_party_egl](https://gitee.com/openharmony/third_party_egl) +- [third_party_opengles](https://gitee.com/openharmony/third_party_opengles) +- [third_party_opencl-headers](https://gitee.com/openharmony/third_party_opencl-headers) +- [third_party_skia](https://gitee.com/openharmony/third_party_skia) \ No newline at end of file diff --git a/README_EN.md b/README_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..65d39a0e5be09e71eea0ce63489466be03fc6766 --- /dev/null +++ b/README_EN.md @@ -0,0 +1,68 @@ +# VPE + +## Overview +The Video Processing Engine (VPE) is a media engine for processing video and image data. It offers a range of fundamental capabilities including enhancements to details, contrast, luminance, and dynamic ranges. 
It also supports essential algorithms for color space conversion, scaling and upscaling, and dynamic metadata generation for transcoding, sharing, and post-processing for display.
+
+The following figure shows the VPE architecture.
+
+![VPE architecture](./figures/videoProcessingEngine_architecture_english.png)
+
+## Directory Structure
+
+The structure of the repository directory is as follows:
+
+```
+/foundation/multimedia/video_processing_engine/
+├── framework                            # Framework code
+│   ├── algorithm                        # Algorithm framework
+│   ├── aihdr_enhancer                   # Image HDR enhancement algorithm framework
+│   ├── aihdr_enhancer_video             # Video HDR enhancement algorithm framework
+│   ├── colorspace_converter             # Image color space conversion algorithm framework
+│   ├── colorspace_converter_display     # Image color space display algorithm framework
+│   ├── colorspace_converter_video       # Video color space conversion algorithm framework
+│   ├── detail_enhancer                  # Image detail enhancement algorithm framework
+│   ├── detail_enhancer_video            # Video detail enhancement algorithm framework
+│   ├── extension_manager                # Plugin management
+│   ├── metadata_generator               # Image metadata generation algorithm framework
+│   ├── metadata_generator_video         # Video metadata generation algorithm framework
+│   ├── video_variable_refresh_rate      # Video variable frame rate algorithm framework
+│   ├── capi                             # NDK layer
+│   ├── image_processing                 # Image NDK
+│   ├── video_processing                 # Video NDK
+│   ├── dfx                              # DFX code
+├── interfaces                           # API layer
+│   ├── inner_api                        # Internal APIs
+│   ├── kits                             # Application APIs
+├── services                             # Service code
+├── test                                 # Test code
+```
+
+## Build
+
+Run the following command to build the VPE for the 32-bit ARM system:
+```
+./build.sh --product-name {product_name} --ccache --build-target video_processing_engine
+```
+
+Run the following command to build the VPE for the 64-bit ARM system:
+```
+./build.sh --product-name {product_name} --ccache --target-cpu arm64 --build-target video_processing_engine
+```
+
+**product_name** indicates the supported product, for example, **rk3568**.
+
+## Description
+
+### How to Use
+As a component of OpenHarmony, the VPE provides video and image processing capabilities, including color space conversion, dynamic metadata generation, and detail enhancement.
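+
+The snippet below is a minimal sketch of driving the image detail enhancer through the C NDK built by the image_processing_capi_impl target in this repository. The header path and the OH_ImageProcessing_* names are assumptions based on the interfaces installed from interfaces/kits/c; check the NDK headers shipped with your SDK for the exact signatures before relying on them.
+
+```
+#include <multimedia/video_processing_engine/image_processing.h>
+
+// Hypothetical helper: run detail enhancement from srcImage into dstImage.
+// Function and constant names are illustrative; see the installed NDK headers.
+ImageProcessing_ErrorCode EnhanceDetailOnce(OH_PixelmapNative* srcImage, OH_PixelmapNative* dstImage)
+{
+    OH_ImageProcessing_InitializeEnvironment();    // optional global environment setup
+    OH_ImageProcessing* processor = NULL;
+    ImageProcessing_ErrorCode ret =
+        OH_ImageProcessing_Create(&processor, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER);
+    if (ret == IMAGE_PROCESSING_SUCCESS) {
+        ret = OH_ImageProcessing_EnhanceDetail(processor, srcImage, dstImage);
+        OH_ImageProcessing_Destroy(processor);
+    }
+    OH_ImageProcessing_DeinitializeEnvironment();
+    return ret;
+}
+```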
+ +## Repositories Involved + +- [graphic_graphic_2d](https://gitee.com/openharmony/graphic_graphic_2d) +- [graphic_graphic_surface](https://gitee.com/openharmony/graphic_graphic_surface) +- [multimedia_image_framework](https://gitee.com/openharmony/multimedia_image_framework) +- [multimedia_media_foundation](https://gitee.com/openharmony/multimedia_media_foundation) +- [third_party_egl](https://gitee.com/openharmony/third_party_egl) +- [third_party_opengles](https://gitee.com/openharmony/third_party_opengles) +- [third_party_opencl-headers](https://gitee.com/openharmony/third_party_opencl-headers) +- [third_party_skia](https://gitee.com/openharmony/third_party_skia) diff --git a/bundle.json b/bundle.json new file mode 100644 index 0000000000000000000000000000000000000000..ce185fea03304489583a86eedcfaa0df55678117 --- /dev/null +++ b/bundle.json @@ -0,0 +1,140 @@ +{ + "name": "@ohos/video_processing_engine", + "description": "video_processing_engine", + "version": "4.1", + "license": "Apache License 2.0", + "publishAs": "code-segment", + "segment": { + "destPath": "foundation/multimedia/video_processing_engine" + }, + "dirs": {}, + "scripts": {}, + "component": { + "name": "video_processing_engine", + "subsystem": "multimedia", + "syscap": [], + "features": [], + "adapted_system_type": [ + "standard" + ], + "rom": "10000KB", + "ram": "10000KB", + "hisysevent_config": [], + "deps": { + "components": [ + "c_utils", + "graphic_2d", + "graphic_surface", + "hilog", + "hitrace", + "drivers_interface_display", + "ffrt", + "init", + "hdf_core", + "image_framework", + "media_foundation", + "napi", + "ipc", + "safwk", + "samgr", + "eventhandler" + ], + "third_party": [ + "skia", + "egl", + "opengles", + "bounds_checking_function", + "opencl-headers" + ] + }, + "build": { + "sub_component": [ + "//foundation/multimedia/video_processing_engine/framework:videoprocessingengine", + "//foundation/multimedia/video_processing_engine/services:video_processing_service_group" + ], + "inner_kits": [ + { + "type": "so", + "name": "//foundation/multimedia/video_processing_engine/framework:videoprocessingengine", + "header": { + "header_files": [ + "algorithm_common.h", + "algorithm_errors.h", + "colorspace_converter.h", + "colorspace_converter_display.h", + "metadata_generator.h", + "colorspace_converter_video.h", + "colorspace_converter_video_common.h", + "colorspace_converter_video_description.h", + "detail_enhancer_common.h", + "detail_enhancer_image.h", + "detail_enhancer_video_common.h", + "detail_enhancer_video.h" + ], + "header_base": "//foundation/multimedia/video_processing_engine/interfaces/inner_api" + } + }, + { + "name": "//foundation/multimedia/video_processing_engine/services:videoprocessingservice_idl_headers", + "header": { + "header_files": [ + "video_processing_service_manager_proxy.h" + ], + "header_base": "//foundation/multimedia/video_processing_engine/services" + } + }, + { + "type": "so", + "name": "//foundation/multimedia/video_processing_engine/framework:image_processing_capi_impl", + "header":{ + "header_files":[ + "image_processing_capi_impl.h" + ], + "header_base": "//foundation/multimedia/video_processing_engine/framework/capi/image_processing/include" + } + }, + { + "type": "so", + "name": "//foundation/multimedia/video_processing_engine/framework:video_processing_capi_impl", + "header":{ + "header_files":[ + "video_processing_capi_impl.h" + ], + "header_base": "//foundation/multimedia/video_processing_engine/framework/capi/video_processing/include" + } + }, + { + "type": "so", + 
"name": "//foundation/multimedia/video_processing_engine/framework:detailEnhancer", + "header":{ + "header_files":[ + "detail_enhance_napi.h" + ], + "header_base": "//foundation/multimedia/video_processing_engine/interfaces/kits/js" + } + }, + { + "type": "so", + "name": "//foundation/multimedia/video_processing_engine/framework:videoprocessingenginenapi", + "header":{ + "header_files":[ + "detail_enhance_napi_formal.h", + "native_module_ohos_imageprocessing.h" + ], + "header_base": "//foundation/multimedia/video_processing_engine/interfaces/kits/js" + } + } + ], + "group_type": { + "base_group": [], + "fwk_group": [], + "service_group": [] + }, + "test": [ + "//foundation/multimedia/video_processing_engine/test:demo_test", + "//foundation/multimedia/video_processing_engine/test:unit_test", + "//foundation/multimedia/video_processing_engine/test:module_test" + ] + } + } +} diff --git a/config.gni b/config.gni new file mode 100644 index 0000000000000000000000000000000000000000..671cd55bab03c126385a8f38f187cb6fb1eaf17d --- /dev/null +++ b/config.gni @@ -0,0 +1,90 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +VIDEO_PROCESSING_ENGINE_ROOT_DIR = "//foundation/multimedia/video_processing_engine" + +FRAMEWORK_DIR = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework" +ALGORITHM_DIR = "$FRAMEWORK_DIR/algorithm" +CAPI_DIR = "$FRAMEWORK_DIR/capi" +COMMON_DIR = "$FRAMEWORK_DIR/common" +DFX_DIR = "$FRAMEWORK_DIR/dfx" +PLUGIN_DIR = "$FRAMEWORK_DIR/plugin" + +AIHDR_ENHANCER_DIR = "$ALGORITHM_DIR/aihdr_enhancer" +AIHDR_ENHANCER_VIDEO_DIR = "$ALGORITHM_DIR/aihdr_enhancer_video" +COLORSPACE_CONVERTER_DIR = "$ALGORITHM_DIR/colorspace_converter" +COLORSPACE_CONVERTER_DISPLAY_DIR = "$ALGORITHM_DIR/colorspace_converter_display" +COLORSPACE_CONVERTER_VIDEO_DIR = "$ALGORITHM_DIR/colorspace_converter_video" +METADATA_GENERATOR_DIR = "$ALGORITHM_DIR/metadata_generator" +METADATA_GENERATOR_VIDEO_DIR = "$ALGORITHM_DIR/metadata_generator_video" +ALGORITHM_EXTENSION_MANAGER_DIR = "$ALGORITHM_DIR/extension_manager" +ALGORITHM_COMMON_DIR = "$ALGORITHM_DIR/common" +DETAIL_ENHANCER_DIR = "$ALGORITHM_DIR/detail_enhancer" +DETAIL_ENHANCER_VIDEO_DIR = "$ALGORITHM_DIR/detail_enhancer_video" +VIDEO_REFRESHRATE_PREDICTION_DIR = "$ALGORITHM_DIR/video_variable_refresh_rate" + +#CAPI +CAPI_IMAGE_DIR = "$FRAMEWORK_DIR/capi/image_processing" +CAPI_COLORSPACE_CONVERTER_DIR = "$CAPI_IMAGE_DIR/colorspace_converter" +CAPI_METADATA_GENERATOR_DIR = "$CAPI_IMAGE_DIR/metadata_generator" +CAPI_IMAGE_DETAIL_ENHANCER_DIR = "$CAPI_IMAGE_DIR/detail_enhancer" +CAPI_VIDEO_DIR = "$FRAMEWORK_DIR/capi/video_processing" +CAPI_VIDEO_DETAIL_ENHANCER_DIR = "$CAPI_VIDEO_DIR/detail_enhancer" +CAPI_VIDEO_COLORSPACE_CONVERTER_DIR = "$CAPI_VIDEO_DIR/colorspace_converter" +CAPI_VIDEO_METADATA_GENERATOR_DIR = "$CAPI_VIDEO_DIR/metadata_generator" + +INTERFACES_DIR = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/interfaces" +INTERFACES_INNER_API_DIR = "$INTERFACES_DIR/inner_api" +INTERFACES_CAPI_DIR = "$INTERFACES_DIR/kits/c" 
+INTERFACES_PLUGIN_DIR = "$INTERFACES_DIR/plugin" + +SKIA_DIR = "//third_party/skia" +EGL_DIR = "//third_party/EGL" +OPENGLES_DIR = "//third_party/openGLES" + +#cuva +CUVA_DIR = "$ALGORITHM_DIR/extensions/colorspace_converter_display/cuva_libs" + +#detail enhancer +IMAGE_AISR_DIR = "$ALGORITHM_DIR/extensions/detail_enhancer/image/ai_super_resolution" +IMAGE_EVE_DIR = "$ALGORITHM_DIR/extensions/detail_enhancer/image/extream_vision_engine" +IMAGE_LIBYUV_DIR = "$ALGORITHM_DIR/extensions/detail_enhancer/image/libyuv" +VIDEO_AISR_DIR = "$ALGORITHM_DIR/extensions/detail_enhancer/video/ai_super_resolution" + +#contrast enhancer +CONTRAST_ENHANCER_DIR = "$ALGORITHM_DIR/contrast_enhancer" + +TEST_UTILS_PATH = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/utils" +UNIT_TEST_OUTPUT_PATH = "video_processing_engine/unittest" +MODULE_TEST_OUTPUT_PATH = "video_processing_engine/moduletest" + +VIDEO_PROCESSING_ENGINE_CFLAGS = [ + "-std=c++17", + "-fno-rtti", + "-fno-exceptions", + "-Wall", + "-fno-common", + "-fstack-protector-strong", + "-Wshadow", + "-FPIC", + "-FS", + "-O2", + "-D_FORTIFY_SOURCE=2", + "-Wformat=2", + "-Wdate-time", + "-Werror", + "-Wextra", + "-Wimplicit-fallthrough", + "-Wsign-compare", + "-Wunused-parameter" +] diff --git a/figures/videoProcessingEngine_architecture.png b/figures/videoProcessingEngine_architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..4cb1c857244e20365c999cd521cc72082a5a4829 Binary files /dev/null and b/figures/videoProcessingEngine_architecture.png differ diff --git a/figures/videoProcessingEngine_architecture_english.png b/figures/videoProcessingEngine_architecture_english.png new file mode 100644 index 0000000000000000000000000000000000000000..d9a70388c02e0d90e39f7d0990215ce34a0fc1f4 Binary files /dev/null and b/figures/videoProcessingEngine_architecture_english.png differ diff --git a/framework/BUILD.gn b/framework/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..49aa7c4b9842aa66add2db55c2550618c4ef0996 --- /dev/null +++ b/framework/BUILD.gn @@ -0,0 +1,501 @@ +# Copyright (c) 2025 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/ohos.gni") +import("//foundation/ability/idl_tool/idl_config.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") +import("$CAPI_DIR/config.gni") + +config("export_config") { + include_dirs = [ + "$AIHDR_ENHANCER_DIR/include", + "$INTERFACES_INNER_API_DIR", + "$DFX_DIR/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "$DETAIL_ENHANCER_DIR/include", + "$COLORSPACE_CONVERTER_DIR/include", + "$METADATA_GENERATOR_DIR/include", + "$INTERFACES_INNER_API_DIR/native/colorspace", + "$COLORSPACE_CONVERTER_DISPLAY_DIR/include", + "$VIDEO_REFRESHRATE_PREDICTION_DIR/include", + "$ALGORITHM_DIR/common/include", + ] +} +config("video_process_config") { + cflags = [ + "-std=c++17", + "-fno-rtti", + "-fno-exceptions", + "-Wall", + "-fno-common", + "-fstack-protector-strong", + "-Wshadow", + "-FPIC", + "-FS", + "-O2", + "-D_FORTIFY_SOURCE=2", + "-Wformat=2", + + # "-Wfloat-equal", + "-Wdate-time", + "-Werror", + "-Wextra", + "-Wimplicit-fallthrough", + "-Wsign-compare", + "-Wunused-parameter", + ] + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$FRAMEWORK_DIR/common/include", + "$AIHDR_ENHANCER_DIR/include", + "$AIHDR_ENHANCER_VIDEO_DIR/include", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$COLORSPACE_CONVERTER_DIR/include", + "$COLORSPACE_CONVERTER_DISPLAY_DIR/include", + "$COLORSPACE_CONVERTER_VIDEO_DIR/include", + "$METADATA_GENERATOR_DIR/include", + "$METADATA_GENERATOR_VIDEO_DIR/include", + "$DETAIL_ENHANCER_VIDEO_DIR/include", + "$VIDEO_REFRESHRATE_PREDICTION_DIR/include", + "$DETAIL_ENHANCER_DIR/include", + "$DFX_DIR/include", + "$FRAMEWORK_DIR/plugin", + "$FRAMEWORK_DIR/module", + "$INTERFACES_INNER_API_DIR", + "$INTERFACES_INNER_API_DIR/module", + "$INTERFACES_DIR/plugin", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + + # temporary dir + "$SKIA_DIR", + "${target_gen_dir}/../services/", + "$SKIA_DIR/include/core", + "$SKIA_DIR/include/encode", + "$SKIA_DIR", + "$SKIA_DIR/src/ports/skia_ohos", + "$SKIA_DIR/src/ports", + "$SKIA_DIR/src/images", + "$SKIA_DIR/include/private", + "$SKIA_DIR/include/effects", + "$SKIA_DIR/third_party/externals/angle2/src/common", + "$SKIA_DIR/third_party/externals/angle2/src/", + "$SKIA_DIR/third_party/externals/angle2/src/common/third_party/base/", + "//foundation/graphic/graphic_2d/utils/color_manager/export", + "//base/startup/init/interfaces/innerkits/include/syspara/", + ] +} + +ohos_prebuilt_shared_library("extream_vision_engine") { + if (is_asan && use_hwasan) { + source = "//binary/artifacts/display/AIPQ20/asan/libextream_vision_engine.so" + } else { + source = "//binary/artifacts/display/AIPQ20/libextream_vision_engine.so" + } + module_install_dir = "lib64/" + output = "libextream_vision_engine.so" + install_images = [ "system" ] + subsystem_name = "multimedia" + part_name = "video_processing_engine" + install_enable = true +} + +ohos_prebuilt_shared_library("ai_super_resolution") { + if (is_asan && use_hwasan) { + source = "//binary/artifacts/display/AIPQ20/asan/libdisplay_aipq_imagesr.so" + } else { + source = "//binary/artifacts/display/AIPQ20/libdisplay_aipq_imagesr.so" + } + module_install_dir = "lib64/" + output = "libdisplay_aipq_imagesr.so" + install_images = [ "system" ] + subsystem_name = "multimedia" + part_name = "video_processing_engine" + install_enable = true +} + +ohos_prebuilt_shared_library("aihdr_engine") { + if (is_asan && use_hwasan) { + source = 
"//binary/artifacts/display/AIPQ20/asan/libaihdr_engine.so" + } else { + source = "//binary/artifacts/display/AIPQ20/libaihdr_engine.so" + } + module_install_dir = "lib64/" + output = "libaihdr_engine.so" + install_images = [ "system" ] + subsystem_name = "multimedia" + part_name = "video_processing_engine" + install_enable = true +} + +ohos_shared_library("videoprocessingengine") { + branch_protector_ret = "pac_ret" + + install_enable = true + + sanitize = { + boundary_sanitize = true + cfi = true + cfi_cross_dso = true + integer_overflow = true + ubsan = true + debug = false + } + + configs = [ ":video_process_config" ] + public_configs = [ ":export_config" ] + + include_dirs = [] + + sources = [ + "$AIHDR_ENHANCER_DIR/aihdr_enhancer_fwk.cpp", + "$AIHDR_ENHANCER_VIDEO_DIR/aihdr_enhancer_video_impl.cpp", + "$ALGORITHM_DIR/common/algorithm_common.cpp", + "$ALGORITHM_DIR/common/algorithm_utils.cpp", + "$ALGORITHM_DIR/common/algorithm_video.cpp", + "$ALGORITHM_DIR/common/algorithm_video_common.cpp", + "$ALGORITHM_DIR/common/algorithm_video_impl.cpp", + "$ALGORITHM_DIR/common/frame_info.cpp", + "$ALGORITHM_DIR/common/vpe_parse_metadata.cpp", + "$ALGORITHM_DIR/extension_manager/extension_manager.cpp", + "$ALGORITHM_DIR/extension_manager/utils.cpp", + "$COLORSPACE_CONVERTER_DIR/colorspace_converter_fwk.cpp", + "$COLORSPACE_CONVERTER_VIDEO_DIR/colorspace_converter_video_impl.cpp", + "$COLORSPACE_CONVERTER_DISPLAY_DIR/colorspace_converter_display_fwk.cpp", + "$METADATA_GENERATOR_DIR/metadata_generator_fwk.cpp", + "$METADATA_GENERATOR_VIDEO_DIR/metadata_generator_video_impl.cpp", + "$DETAIL_ENHANCER_DIR/detail_enhancer_image_fwk.cpp", + "$DETAIL_ENHANCER_VIDEO_DIR/detail_enhancer_video_fwk.cpp", + "$DETAIL_ENHANCER_VIDEO_DIR/detail_enhancer_video_impl.cpp", + "$VIDEO_REFRESHRATE_PREDICTION_DIR/video_refreshrate_prediction_fwk.cpp", + "$DFX_DIR/vpe_trace.cpp", + "$DFX_DIR/vpe_log.cpp", + #client + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/src/video_processing_client.cpp", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/src/video_processing_load_callback.cpp", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/src/surface_buffer_info.cpp", + "${target_gen_dir}/../services/video_processing_service_manager_proxy.cpp", + "${target_gen_dir}/../services/video_processing_service_manager_stub.cpp", + "$ALGORITHM_COMMON_DIR/image_opencl_wrapper.cpp", + "$ALGORITHM_COMMON_DIR/image_openclsetup.cpp" + ] + + deps = [ "//third_party/skia:skia_ohos", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice_interface", + ":aihdr_engine", + ":ai_super_resolution", + ":extream_vision_engine", + "//third_party/opencl-headers:libcl", + "//third_party/bounds_checking_function:libsec_static"] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "drivers_interface_display:libdisplay_commontype_proxy_2.1", + "graphic_surface:surface", + "graphic_surface:sync_fence", + "hilog:libhilog", + "hitrace:hitrace_meter", + "init:libbegetutil", + "hdf_core:libhdi", + "hdf_core:libhdf_host", + "hdf_core:libhdf_ipc_adapter", + "hdf_core:libhdf_utils", + "hdf_core:hdf_posix_osal", + "graphic_2d:2d_graphics", + "ipc:ipc_single", + "safwk:system_ability_fwk", + "samgr:samgr_proxy", + "media_foundation:media_foundation", + "graphic_2d:EGL", + "graphic_2d:GLESv3", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} + +config("video_processing_engine_capi_config") { + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_NDK_DIR/interface/kits/c", + 
"$VIDEO_PROCESSING_ENGINE_NDK_DIR/interface/inner_api", + ] +} + +ohos_shared_library("image_processing_capi_impl") { + sanitize = { + boundary_sanitize = true + cfi = true + cfi_cross_dso = true + integer_overflow = true + ubsan = true + debug = false + } + + stack_protector_ret = true + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + cflags += [ + "-ffunction-sections", + "-fdata-sections", + "-DIMAGE_COLORSPACE_FLAG", + ] + + ldflags = [ "-Wl,--gc-sections" ] + + configs = [ ":video_processing_engine_capi_config" ] + + include_dirs = [ + #Interface + "$INTERFACES_CAPI_DIR", + "$INTERFACES_INNER_API_DIR", + #Common + "$DFX_DIR/include", + "$ALGORITHM_COMMON_DIR/include", + #CAPI implementation + "$CAPI_IMAGE_DIR/include", + "$CAPI_IMAGE_DETAIL_ENHANCER_DIR/include", + "$CAPI_COLORSPACE_CONVERTER_DIR/include", + "$CAPI_METADATA_GENERATOR_DIR/include", + "$DETAIL_ENHANCER_DIR/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "${target_gen_dir}/../services/", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework/algorithm/common/include", + ] + + sources = [ + "$CAPI_IMAGE_DIR/image_environment_native.cpp", + "$CAPI_IMAGE_DIR/image_processing_capi_impl.cpp", + "$CAPI_IMAGE_DIR/image_processing_factory.cpp", + "$CAPI_IMAGE_DIR/image_processing_impl.cpp", + "$CAPI_IMAGE_DIR/image_processing_native_base.cpp", + "$CAPI_IMAGE_DIR/image_processing_utils.cpp", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework/algorithm/common/vpe_utils_common.cpp", + "$CAPI_IMAGE_DETAIL_ENHANCER_DIR/detail_enhancer_image_native.cpp", + "$CAPI_COLORSPACE_CONVERTER_DIR/colorspace_converter_image_native.cpp", + "$CAPI_METADATA_GENERATOR_DIR/metadata_generator_image_native.cpp", + ] + + deps = [ + ":videoprocessingengine", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice_interface", + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "hilog:libhilog", + "graphic_2d:2d_graphics", + "media_foundation:media_foundation", + "image_framework:pixelmap", + "image_framework:image_native", + "drivers_interface_display:display_commontype_idl_headers", + "ipc:ipc_single", + "safwk:system_ability_fwk", + "samgr:samgr_proxy", + ] + + output_extension = "so" + subsystem_name = "multimedia" + innerapi_tags = [ "ndk" ] + part_name = "video_processing_engine" +} + +ohos_shared_library("video_processing_capi_impl") { + sanitize = { + boundary_sanitize = true + cfi = true + cfi_cross_dso = true + integer_overflow = true + ubsan = true + debug = false + } + + stack_protector_ret = true + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + cflags += [ + "-ffunction-sections", + "-fdata-sections", + ] + + ldflags = [ "-Wl,--gc-sections" ] + + configs = [ ":video_processing_engine_capi_config" ] + + include_dirs = [ + #Interface + "$INTERFACES_CAPI_DIR", + "$INTERFACES_INNER_API_DIR", + #Common + "$DFX_DIR/include", + "$ALGORITHM_COMMON_DIR/include", + #CAPI implementation + "$CAPI_VIDEO_DIR/include/", + "$CAPI_VIDEO_DIR/aihdr_enhancer/include/", + "$CAPI_VIDEO_DETAIL_ENHANCER_DIR/include/", + "$CAPI_VIDEO_COLORSPACE_CONVERTER_DIR/include/", + "//foundation/graphic/graphic_surface/interfaces/inner_api/surface/", + "//foundation/graphic/graphic_surface/surface/include/", + "//foundation/graphic/graphic_2d/interfaces/inner_api", + "$CAPI_VIDEO_METADATA_GENERATOR_DIR/include/", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "${target_gen_dir}/../services/", + ] + + sources = [ + #CAPI common + "$CAPI_VIDEO_DIR/video_environment_native.cpp", + 
"$CAPI_VIDEO_DIR/video_processing_callback_impl.cpp", + "$CAPI_VIDEO_DIR/video_processing_callback_native.cpp", + "$CAPI_VIDEO_DIR/video_processing_capi_impl.cpp", + "$CAPI_VIDEO_DIR/video_processing_capi_capability.cpp", + "$CAPI_VIDEO_DIR/video_processing_factory.cpp", + "$CAPI_VIDEO_DIR/video_processing_impl.cpp", + "$CAPI_VIDEO_DIR/video_processing_native_base.cpp", + "$CAPI_VIDEO_DIR/video_processing_utils.cpp", + #CAPI - features + #CAPI - detail enhancement + "$CAPI_VIDEO_DETAIL_ENHANCER_DIR/detail_enhancer_video_native.cpp", + "$CAPI_VIDEO_COLORSPACE_CONVERTER_DIR/colorSpace_converter_video_native.cpp", + "$CAPI_VIDEO_METADATA_GENERATOR_DIR/metadata_generator_video_native.cpp", + "$CAPI_VIDEO_DIR/aihdr_enhancer/aihdr_enhancer_video_native.cpp" + ] + + deps = [ + ":videoprocessingengine", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice_interface", + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "media_foundation:media_foundation", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "ipc:ipc_single", + "safwk:system_ability_fwk", + "samgr:samgr_proxy", + ] + + output_extension = "so" + subsystem_name = "multimedia" + innerapi_tags = [ "ndk" ] + part_name = "video_processing_engine" +} + +ohos_shared_library("detailEnhancer") { + sanitize = { + cfi = true + cfi_cross_dso = true + cfi_vcall_icall_only = true + debug = false + } + + defines = [ "IMAGE_COLORSPACE_FLAG" ] + + include_dirs = [ + "$DFX_DIR/include", + "//foundation/multimedia/media_foundation/interface/kits/c", + "//foundation/multimedia/image_framework/interfaces/kits/native/include/image", + "//foundation/multimedia/image_framework/frameworks/kits/js/common/ndk/include", + "//foundation/multimedia/video_processing_engine/interfaces/kits/js", + "//foundation/multimedia/media_foundation/video_processing_engine/interface/kits/c", + "//foundation/multimedia/video_processing_engine/interfaces/inner_api", + "//foundation/multimedia/video_processing_engine/framework/capi/image_processing/include/", + ] + sources = [ + "//foundation/multimedia/video_processing_engine/framework/capi/image_processing/detail_enhance_napi.cpp", + "//foundation/multimedia/image_framework/frameworks/innerkitsimpl/common/src/memory_manager.cpp", + ] + + deps = [ + ":videoprocessingengine", + ] + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "ipc:ipc_napi", + "media_foundation:native_media_core", + "napi:ace_napi", + "image_framework:image_utils", + "image_framework:image_native", + "image_framework:pixelmap", + "image_framework:image", + "drivers_interface_display:display_commontype_idl_headers", + ] + + output_name = "libdetailenhancer_napi" + subsystem_name = "multimedia" + relative_install_dir = "module/multimedia" + part_name = "video_processing_engine" +} + +ohos_shared_library("videoprocessingenginenapi") { + sanitize = { + cfi = true + cfi_cross_dso = true + cfi_vcall_icall_only = true + debug = false + } + + defines = [ "IMAGE_COLORSPACE_FLAG" ] + + include_dirs = [ + "$DFX_DIR/include", + "$INTERFACES_DIR/kits/js", + "$INTERFACES_INNER_API_DIR", + "$CAPI_DIR/image_processing/include/", + "$ALGORITHM_COMMON_DIR/include/", + ] + sources = [ + "$CAPI_DIR/image_processing/detail_enhance_napi_formal.cpp", + 
"$INTERFACES_DIR/kits/js/native_module_ohos_imageprocessing.cpp", + ] + + deps = [ + ":videoprocessingengine", + ] + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "image_framework:image_utils", + "image_framework:image_native", + "image_framework:pixelmap", + "image_framework:image", + "ipc:ipc_napi", + "media_foundation:native_media_core", + "media_foundation:media_foundation", + "media_foundation:image_processing", + "napi:ace_napi", + ] + + output_name = "libvideoprocessingengine_napi" + subsystem_name = "multimedia" + relative_install_dir = "module/multimedia" + part_name = "video_processing_engine" +} diff --git a/framework/algorithm/aihdr_enhancer/aihdr_enhancer_fwk.cpp b/framework/algorithm/aihdr_enhancer/aihdr_enhancer_fwk.cpp new file mode 100644 index 0000000000000000000000000000000000000000..616882fea919fac3293496ed2e8fb30270b205c0 --- /dev/null +++ b/framework/algorithm/aihdr_enhancer/aihdr_enhancer_fwk.cpp @@ -0,0 +1,147 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "aihdr_enhancer_fwk.h" + +#include "native_buffer.h" +#include "surface_buffer.h" + +#include "extension_manager.h" +#include "video_processing_client.h" +#include "vpe_log.h" +#include "vpe_trace.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +AihdrEnhancerFwk::AihdrEnhancerFwk() +{ + Extension::ExtensionManager::GetInstance().IncreaseInstance(); +} + +AihdrEnhancerFwk::~AihdrEnhancerFwk() +{ + if (impl_) { + impl_->Deinit(); + impl_ = nullptr; + } + Extension::ExtensionManager::GetInstance().DecreaseInstance(); +} + +VPEAlgoErrCode AihdrEnhancerFwk::SetParameter(const int& parameter) +{ + parameter_ = parameter; + VPE_LOGI("AihdrEnhancerFwk SetParameter Succeed"); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode AihdrEnhancerFwk::GetParameter(int& parameter) const +{ + parameter = parameter_; + VPE_LOGI("AihdrEnhancerFwk GetParameter Succeed"); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode AihdrEnhancerFwk::Process(const sptr& input) +{ + CHECK_AND_RETURN_RET_LOG(input != nullptr, VPE_ALGO_ERR_INVALID_VAL, "Input is nullptr"); + + VPEAlgoErrCode ret = Init(input); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Init failed"); + + VPE_SYNC_TRACE; + ret = impl_->Process(input); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Process failed, ret: %{public}d", ret); + + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode AihdrEnhancerFwk::Init(const sptr& input) +{ + if (initialized_) { + impl_->SetParameter(parameter_); + return VPE_ALGO_ERR_OK; + } + auto &manager = Extension::ExtensionManager::GetInstance(); + + VPE_SYNC_TRACE; + + FrameInfo info(input); + impl_ = manager.CreateAihdrEnhancer(info, extensionInfo_); + CHECK_AND_RETURN_RET_LOG(impl_ != nullptr, VPE_ALGO_ERR_NOT_IMPLEMENTED, "Create failed"); + + int32_t ret = 
impl_->Init();
+    CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_NOT_IMPLEMENTED, "Init failed");
+    impl_->SetParameter(parameter_);
+    initialized_ = true;
+    VPE_LOGI("AihdrEnhancerFwk Init Succeed");
+
+    return VPE_ALGO_ERR_OK;
+}
+
+std::shared_ptr<AihdrEnhancer> AihdrEnhancer::Create()
+{
+    auto p = std::make_shared<AihdrEnhancerFwk>();
+    CHECK_AND_RETURN_RET_LOG(p != nullptr, nullptr, "Create AihdrEnhancer failed");
+    return std::static_pointer_cast<AihdrEnhancer>(p);
+}
+
+int32_t AihdrEnhancerCreate(int32_t* instance)
+{
+    CHECK_AND_RETURN_RET_LOG(instance != nullptr, VPE_ALGO_ERR_INVALID_VAL, "invalid instance");
+    auto p = AihdrEnhancer::Create();
+    CHECK_AND_RETURN_RET_LOG(p != nullptr, VPE_ALGO_ERR_INVALID_VAL, "cannot create instance");
+    Extension::ExtensionManager::InstanceVariableType instanceVar { p };
+    int32_t newId = Extension::ExtensionManager::GetInstance().NewInstanceId(instanceVar);
+    CHECK_AND_RETURN_RET_LOG(newId != -1, VPE_ALGO_ERR_NO_MEMORY, "cannot create more instance");
+    *instance = newId;
+    return VPE_ALGO_ERR_OK;
+}
+
+int32_t AihdrEnhancerProcessImage(int32_t instance, OHNativeWindowBuffer* inputImage)
+{
+    CHECK_AND_RETURN_RET_LOG((inputImage != nullptr), VPE_ALGO_ERR_INVALID_VAL,
+        "invalid parameters");
+    auto someInstance = Extension::ExtensionManager::GetInstance().GetInstance(instance);
+    CHECK_AND_RETURN_RET_LOG(someInstance != std::nullopt, VPE_ALGO_ERR_INVALID_VAL, "invalid instance");
+
+    VPEAlgoErrCode ret = VPE_ALGO_ERR_INVALID_VAL;
+    auto visitFunc = [inputImage, &ret](auto&& var) {
+        using VarType = std::decay_t<decltype(var)>;
+        if constexpr (std::is_same_v<VarType, std::shared_ptr<AihdrEnhancer>>) {
+            OH_NativeBuffer* inputImageNativeBuffer = nullptr;
+            CHECK_AND_RETURN_LOG(
+                (OH_NativeBuffer_FromNativeWindowBuffer(inputImage, &inputImageNativeBuffer) == GSERROR_OK),
+                "invalid input image");
+            sptr<SurfaceBuffer> inputImageSurfaceBuffer(
+                SurfaceBuffer::NativeBufferToSurfaceBuffer(inputImageNativeBuffer));
+            ret = var->Process(inputImageSurfaceBuffer);
+        } else {
+            VPE_LOGE("instance may be miss used");
+        }
+    };
+    std::visit(visitFunc, *someInstance);
+
+    return ret;
+}
+
+int32_t AihdrEnhancerDestroy(int32_t* instance)
+{
+    CHECK_AND_RETURN_RET_LOG(instance != nullptr, VPE_ALGO_ERR_INVALID_VAL, "instance is null");
+    return Extension::ExtensionManager::GetInstance().RemoveInstanceReference(*instance);
+}
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
diff --git a/framework/algorithm/aihdr_enhancer/include/aihdr_enhancer_base.h b/framework/algorithm/aihdr_enhancer/include/aihdr_enhancer_base.h
new file mode 100644
index 0000000000000000000000000000000000000000..bfae48189584437551493dd2c6f9be54baf7ae1d
--- /dev/null
+++ b/framework/algorithm/aihdr_enhancer/include/aihdr_enhancer_base.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef AIHDR_ENHANCER_BASE_H +#define AIHDR_ENHANCER_BASE_H + +#include +#include + +#include "nocopyable.h" + +#include "frame_info.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class AihdrEnhancerBase : public NoCopyable { +public: + virtual ~AihdrEnhancerBase() = default; + virtual VPEAlgoErrCode Init() = 0; + virtual VPEAlgoErrCode Deinit() = 0; + virtual VPEAlgoErrCode SetParameter(const int& parameter) = 0; + virtual VPEAlgoErrCode GetParameter(int& parameter) = 0; + virtual VPEAlgoErrCode Process(const sptr& input) = 0; +}; + +using AihdrEnhancerCreator = std::function()>; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // AIHDR_ENHANCER_BASE_H diff --git a/framework/algorithm/aihdr_enhancer/include/aihdr_enhancer_capability.h b/framework/algorithm/aihdr_enhancer/include/aihdr_enhancer_capability.h new file mode 100644 index 0000000000000000000000000000000000000000..1e50523483593883d16ff935dcb161c37ecba34e --- /dev/null +++ b/framework/algorithm/aihdr_enhancer/include/aihdr_enhancer_capability.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef AIHDR_ENHANCER_CAPABILITY_H +#define AIHDR_ENHANCER_CAPABILITY_H + +#include +#include + +#include "frame_info.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +struct AihdrEnhancerCapability { + ColorSpaceDescription colorspaceDesc; + std::vector pixelFormats; + uint32_t rank; + int32_t version; +}; + +using AihdrEnhancerCapabilitiesBuilder = std::function()>; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // AIHDR_ENHANCER_CAPABILITY_H diff --git a/framework/algorithm/aihdr_enhancer/include/aihdr_enhancer_fwk.h b/framework/algorithm/aihdr_enhancer/include/aihdr_enhancer_fwk.h new file mode 100644 index 0000000000000000000000000000000000000000..71fa74b3cc87c27b20700ef535b2a7edbb00ee04 --- /dev/null +++ b/framework/algorithm/aihdr_enhancer/include/aihdr_enhancer_fwk.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef AIHDR_ENHANCER_FWK_H +#define AIHDR_ENHANCER_FWK_H + +#include +#include +#include + +#include "aihdr_enhancer.h" +#include "aihdr_enhancer_base.h" +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class AihdrEnhancerFwk : public AihdrEnhancer { +public: + AihdrEnhancerFwk(); + ~AihdrEnhancerFwk(); + VPEAlgoErrCode SetParameter(const int& parameter) override; + VPEAlgoErrCode GetParameter(int& parameter) const override; + VPEAlgoErrCode Process(const sptr& input) override; + +private: + VPEAlgoErrCode Init(const sptr& input); + + std::shared_ptr impl_ { nullptr }; + int parameter_{}; + std::atomic initialized_ { false }; + Extension::ExtensionInfo extensionInfo_; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // AIHDR_ENHANCER_FWK_H diff --git a/framework/algorithm/aihdr_enhancer_video/aihdr_enhancer_video_impl.cpp b/framework/algorithm/aihdr_enhancer_video/aihdr_enhancer_video_impl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f44a604530f158c7b7a7c3d4f64161bdce1bbc6b --- /dev/null +++ b/framework/algorithm/aihdr_enhancer_video/aihdr_enhancer_video_impl.cpp @@ -0,0 +1,622 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "aihdr_enhancer_video_impl.h" + +#include +#include +#include +#include + +#include "native_window.h" +#include "securec.h" +#include "surface.h" + +#include "algorithm_common.h" +#include "algorithm_errors.h" +#include "algorithm_utils.h" +#include "extension_manager.h" +#include "vpe_log.h" +#include "vpe_trace.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +std::shared_ptr AihdrEnhancerVideo::Create() +{ + std::shared_ptr impl = std::make_shared(); + int32_t ret = impl->Init(); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, nullptr, "failed to init AihdrEnhancerVideoImpl"); + return impl; +} + +AihdrEnhancerVideoImpl::AihdrEnhancerVideoImpl() +{ + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = 32; // 32 内存对齐 + requestCfg_.usage = 0; + requestCfg_.format = 0; + requestCfg_.width = 0; + requestCfg_.height = 0; + flushCfg_.timestamp = 0; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = 0; + flushCfg_.damage.h = 0; +} + +AihdrEnhancerVideoImpl::~AihdrEnhancerVideoImpl() +{ + Release(); +} + +int32_t AihdrEnhancerVideoImpl::Init() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::UNINITIALIZED, VPE_ALGO_ERR_INVALID_STATE, + "Init failed: not in UNINITIALIZED state"); + csc_ = AihdrEnhancer::Create(); + CHECK_AND_RETURN_RET_LOG(csc_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "AihdrEnhancer Create failed"); + + isRunning_.store(true); + taskThread_ = std::make_shared(&AihdrEnhancerVideoImpl::OnTriggered, this); + CHECK_AND_RETURN_RET_LOG(taskThread_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "Fatal: No memory"); + + state_ = VPEAlgoState::INITIALIZED; + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::SetCallback(const std::shared_ptr &callback) +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VPE_ALGO_ERR_INVALID_VAL, "Set callback failed: callback is NULL"); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING, + VPE_ALGO_ERR_INVALID_STATE, "SetCallback failed: not in INITIALIZED or CONFIGURING state"); + cb_ = callback; + state_ = VPEAlgoState::CONFIGURING; + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::AttachToNewSurface(sptr newSurface) +{ + std::lock_guard lockrender(renderQueMutex_); + for (auto it = outputBufferAvilQueBak_.begin(); it != outputBufferAvilQueBak_.end(); ++it) { + auto buffer = it->second; + GSError err = newSurface->AttachBufferToQueue(buffer->memory); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "outputbuffer AttachToNewSurface fail"); + } + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::GetReleaseOutBuffer() +{ + std::lock_guard mapLock(renderQueMutex_); + for (RenderBufferAvilMapType::iterator it = renderBufferMapBak_.begin(); it != renderBufferMapBak_.end(); ++it) { + outputBufferAvilQue_.push(it->second); + } + renderBufferMapBak_.clear(); + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::SetOutputSurfaceConfig(sptr surface) +{ + GSError err = surface->RegisterReleaseListener([this](sptr &buffer) { + (void)buffer; + return OnProducerBufferReleased(); + }); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "RegisterReleaseListener fail"); + surface->SetQueueSize(outBufferCnt_); + outputSurface_ = surface; + state_ = VPEAlgoState::CONFIGURING; + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::SetOutputSurfaceRunning(sptr newSurface) +{ + std::lock_guard 
lockSurface(surfaceChangeMutex_); + std::lock_guard lockSurface2(surfaceChangeMutex2_); + uint64_t oldId = outputSurface_->GetUniqueId(); + uint64_t newId = newSurface->GetUniqueId(); + CHECK_AND_RETURN_RET_LOG(oldId != newId, VPE_ALGO_ERR_OK, "SetOutputSurfaceRunning same surface"); + + outputSurface_->UnRegisterReleaseListener(); + outputSurface_->CleanCache(true); + GSError err = newSurface->RegisterReleaseListener([this](sptr &buffer) { + (void)buffer; + return OnProducerBufferReleased(); + }); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "RegisterReleaseListener fail"); + newSurface->SetQueueSize(outBufferCnt_); + newSurface->Connect(); + newSurface->CleanCache(); + GetReleaseOutBuffer(); + int32_t ret = AttachToNewSurface(newSurface); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Attach new surface fail"); + + GraphicTransformType inTransform; + ScalingMode inScaleMode; + inTransform = inputSurface_->GetTransform(); + + outputSurface_ = newSurface; + err = outputSurface_->SetTransform(inTransform); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "SetTransform fail"); + + if (lastSurfaceSequence_ != MAX_SURFACE_SEQUENCE) { + err = inputSurface_->GetScalingMode(lastSurfaceSequence_, inScaleMode); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "GetScalingMode fail"); + err = outputSurface_->SetScalingMode(inScaleMode); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "SetScalingMode fail"); + } + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::SetOutputSurface(sptr surface) +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VPE_ALGO_ERR_INVALID_VAL, "surface is nullptr"); + CHECK_AND_RETURN_RET_LOG(surface->IsConsumer() == false, VPE_ALGO_ERR_INVALID_VAL, "surface is not producer"); + if (state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING) { + int32_t ret = SetOutputSurfaceConfig(surface); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_INVALID_STATE, "SetOutputSurface config fail"); + } else if (state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS || state_ == VPEAlgoState::FLUSHED) { + int32_t ret = SetOutputSurfaceRunning(surface); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_INVALID_STATE, "SetOutputSurface Running fail"); + } else { + CHECK_AND_RETURN_RET_LOG(false, VPE_ALGO_ERR_INVALID_STATE, "surface state not support SetOutputSurface"); + } + + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::SetSurface(const OHNativeWindow* window) +{ + CHECK_AND_RETURN_RET_LOG(window != nullptr && window->surface != nullptr, VPE_ALGO_ERR_INVALID_PARAM, + "window is null or surface buffer is null!"); + return SetOutputSurface(window->surface); +} + +sptr AihdrEnhancerVideoImpl::CreateInputSurface() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING, nullptr, + "CreateInputSurface failed: not in INITIALIZED or CONFIGURING state"); + CHECK_AND_RETURN_RET_LOG(inputSurface_ == nullptr, nullptr, "inputSurface already exists"); + + inputSurface_ = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + CHECK_AND_RETURN_RET_LOG(inputSurface_ != nullptr, nullptr, "CreateSurfaceAsConsumer fail"); + sptr listener = new AihdrEnhancerBufferConsumerListener(this); + GSError err = inputSurface_->RegisterConsumerListener(listener); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, nullptr, 
"RegisterConsumerListener fail"); + + sptr producer = inputSurface_->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + CHECK_AND_RETURN_RET_LOG(producerSurface != nullptr, nullptr, "CreateSurfaceAsProducer fail"); + producerSurface->SetDefaultUsage(BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER | + BUFFER_USAGE_MEM_DMA | BUFFER_USAGE_MEM_MMZ_CACHE); + inputSurface_->SetQueueSize(inBufferCnt_); + state_ = VPEAlgoState::CONFIGURING; + + return producerSurface; +} + +int32_t AihdrEnhancerVideoImpl::GetSurface(OHNativeWindow** window) +{ + CHECK_AND_RETURN_RET_LOG(window != nullptr, VPE_ALGO_ERR_INVALID_VAL, "window is null!"); + sptr surface = CreateInputSurface(); + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VPE_ALGO_ERR_INVALID_STATE, "get surface failed!"); + *window = CreateNativeWindowFromSurface(&surface); + CHECK_AND_RETURN_RET_LOG(*window != nullptr, VPE_ALGO_ERR_INVALID_STATE, "create window failed!"); + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::Configure() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING || state_ == + VPEAlgoState::STOPPED, VPE_ALGO_ERR_INVALID_STATE, "Configure failed: not in INITIALIZED or CONFIGURING state"); + int32_t ret = VPE_ALGO_ERR_OK; + state_ = (ret == VPE_ALGO_ERR_OK ? VPEAlgoState::CONFIGURING : VPEAlgoState::ERROR); + return ret; +} + +int32_t AihdrEnhancerVideoImpl::Prepare() +{ + std::lock_guard lock(mutex_); + if (state_ == VPEAlgoState::STOPPED) { + state_ = VPEAlgoState::CONFIGURED; + return VPE_ALGO_ERR_OK; + } + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::CONFIGURING, VPE_ALGO_ERR_INVALID_STATE, + "Prepare failed: not in CONFIGURING state"); + CHECK_AND_RETURN_RET_LOG(cb_ != nullptr && inputSurface_ != nullptr && outputSurface_ != nullptr, + VPE_ALGO_ERR_INVALID_OPERATION, "Prepare faled: inputSurface or outputSurface or callback is null"); + + state_ = VPEAlgoState::CONFIGURED; + return VPE_ALGO_ERR_OK; +} + +void AihdrEnhancerVideoImpl::InitBuffers() +{ + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = requestCfg_.width; + flushCfg_.damage.h = requestCfg_.height; + for (uint32_t i = 0; i < outBufferCnt_; ++i) { + std::shared_ptr buffer = std::make_shared(); + GSError err = outputSurface_->RequestBuffer(buffer->memory, buffer->fence, requestCfg_); + if (err != GSERROR_OK || buffer->memory == nullptr) { + VPE_LOGW("RequestBuffer %{public}u failed, GSError=%{public}d", i, err); + continue; + } + outputBufferAvilQue_.push(buffer); + outputBufferAvilQueBak_.insert(std::make_pair(buffer->memory->GetSeqNum(), buffer)); + } +} + +int32_t AihdrEnhancerVideoImpl::Start() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG( + (state_ == VPEAlgoState::CONFIGURED || state_ == VPEAlgoState::STOPPED || state_ == VPEAlgoState::FLUSHED), + VPE_ALGO_ERR_INVALID_STATE, + "Start failed: not in CONFIGURED or STOPPED state"); + if (isEos_.load()) { + state_ = VPEAlgoState::EOS; + } else { + state_ = VPEAlgoState::RUNNING; + } + cb_->OnState(static_cast(state_.load())); + cvTaskStart_.notify_all(); + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::Stop() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG( + state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS || state_ == VPEAlgoState::FLUSHED, + VPE_ALGO_ERR_INVALID_STATE, + "Stop failed: not in RUNNING or EOS state"); + + state_ = VPEAlgoState::STOPPED; + if 
(!isProcessing_) { + cb_->OnState(static_cast(state_.load())); + } + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::Reset() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG( + state_ != VPEAlgoState::UNINITIALIZED, VPE_ALGO_ERR_INVALID_STATE, "Start failed: not in right state"); + std::unique_lock lockTask(mtxTaskDone_); + state_ = VPEAlgoState::INITIALIZED; + cvTaskDone_.wait(lockTask, [this]() { return isProcessing_.load() == false; }); + + csc_ = AihdrEnhancer::Create(); + CHECK_AND_RETURN_RET_LOG(csc_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "ColorSpaceConverter Create failed"); + isEos_.store(false); + + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::Release() +{ + std::lock_guard lock(mutex_); + { + std::unique_lock lockTask(mtxTaskDone_); + state_ = VPEAlgoState::UNINITIALIZED; + cvTaskDone_.wait(lockTask, [this]() { return isProcessing_.load() == false; }); + + inputSurface_ = nullptr; + std::unique_lock lockSurface(surfaceChangeMutex_); + std::unique_lock lockSurface2(surfaceChangeMutex2_); + if (outputSurface_ != nullptr) { + outputSurface_->UnRegisterReleaseListener(); + outputSurface_->CleanCache(true); + outputSurface_ = nullptr; + } + lockSurface2.unlock(); + lockSurface.unlock(); + cb_ = nullptr; + csc_ = nullptr; + isRunning_.store(false); + } + if (taskThread_ != nullptr && taskThread_->joinable()) { + cvTaskStart_.notify_all(); + taskThread_->join(); + } + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::Flush() +{ + std::lock_guard lock(mutex_); + { + std::unique_lock lockTask(mtxTaskDone_); + cvTaskDone_.wait(lockTask, [this]() { return isProcessing_.load() == false; }); + } + + { + std::unique_lock lockInQue(onBqMutex_); + std::queue> tempQueue; + inputBufferAvilQue_.swap(tempQueue); + for (; tempQueue.size() != 0;) { + auto buffer = tempQueue.front(); + tempQueue.pop(); + CHECK_AND_RETURN_RET_LOG(buffer && buffer->memory != nullptr, VPE_ALGO_ERR_UNKNOWN, "Invalid memory"); + GSError err = inputSurface_->ReleaseBuffer(buffer->memory, -1); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "Release buffer failed"); + } + } + + std::lock_guard mapLock(renderQueMutex_); + for (auto &[id, buffer] : renderBufferAvilMap_) { + VPE_LOGD("Reclaim buffer %{public}" PRIu64, id); + outputBufferAvilQue_.push(buffer); + } + renderBufferAvilMap_.clear(); + state_ = VPEAlgoState::FLUSHED; + return VPE_ALGO_ERR_OK; +} + +void AihdrEnhancerVideoImpl::Process(std::shared_ptr inputBuffer, + std::shared_ptr outputBuffer) +{ + VPETrace videoTrace("AihdrEnhancerVideoImpl::Process"); + int32_t ret = VPE_ALGO_ERR_EXTENSION_PROCESS_FAILED; + outputBuffer->timestamp = inputBuffer->timestamp; + sptr surfaceInputBuffer = inputBuffer->memory; + sptr surfaceOutputBuffer = outputBuffer->memory; + bool copyRet = AlgorithmUtils::CopySurfaceBufferToSurfaceBuffer(surfaceInputBuffer, surfaceOutputBuffer); + if (!copyRet) { + requestCfg_.width = surfaceInputBuffer->GetWidth(); + requestCfg_.height = surfaceInputBuffer->GetHeight(); + requestCfg_.format = surfaceInputBuffer->GetFormat(); + surfaceOutputBuffer->EraseMetadataKey(ATTRKEY_COLORSPACE_INFO); + surfaceOutputBuffer->EraseMetadataKey(ATTRKEY_HDR_METADATA_TYPE); + if (surfaceOutputBuffer->Alloc(requestCfg_) == GSERROR_OK) { + copyRet = AlgorithmUtils::CopySurfaceBufferToSurfaceBuffer(surfaceInputBuffer, surfaceOutputBuffer); + } + } + if (copyRet) { + VPETrace cscTrace("AihdrEnhancerVideoImpl::csc_->Process"); + ret = csc_->Process(surfaceOutputBuffer); + } + if (ret != 0 && cb_) { + 
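+ // A failed conversion is reported to the client through the OnError callback; the input buffer is still released below.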
cb_->OnError(ret); + } + inputSurface_->ReleaseBuffer(surfaceInputBuffer, -1); + if (!ret) { + std::unique_lock lockOnBq(renderQueMutex_); + renderBufferAvilMap_.emplace(outputBuffer->memory->GetSeqNum(), outputBuffer); + } else { + std::lock_guard renderLock(renderQueMutex_); + outputBufferAvilQue_.push(outputBuffer); + } + + if (!ret && cb_) { + cb_->OnOutputBufferAvailable(surfaceOutputBuffer->GetSeqNum(), outputBuffer->bufferFlag); + } +} + +bool AihdrEnhancerVideoImpl::WaitProcessing() +{ + if (!isRunning_.load()) { + return false; + } + { + std::unique_lock lock(mtxTaskStart_); + cvTaskStart_.wait(lock, [this]() { + std::lock_guard inQueueLock(onBqMutex_); + std::lock_guard outQueueLock(renderQueMutex_); + return ((inputBufferAvilQue_.size() > 0 && outputBufferAvilQue_.size() > 0) || !isRunning_.load()); + }); + } + + return true; +} + +bool AihdrEnhancerVideoImpl::AcquireInputOutputBuffers(std::shared_ptr& inputBuffer, + std::shared_ptr& outputBuffer) +{ + std::lock_guard lockOnBq(onBqMutex_); + std::lock_guard mapLock(renderQueMutex_); + if (inputBufferAvilQue_.size() == 0 || outputBufferAvilQue_.size() == 0) { + if (state_ == VPEAlgoState::STOPPED) { + cb_->OnState(static_cast(state_.load())); + } + return false; + } + inputBuffer = inputBufferAvilQue_.front(); + outputBuffer = outputBufferAvilQue_.front(); + inputBufferAvilQue_.pop(); + outputBufferAvilQue_.pop(); + return inputBuffer && outputBuffer; +} + +void AihdrEnhancerVideoImpl::DoTask() +{ + std::shared_ptr inputBuffer = nullptr; + std::shared_ptr outputBuffer = nullptr; + while (true) { + std::lock_guard lockTask(mtxTaskDone_); + if (!isRunning_.load()) { + return; + } + isProcessing_.store(true); + + if (!AcquireInputOutputBuffers(inputBuffer, outputBuffer)) { + break; + } + if (inputBuffer->bufferFlag == AIHDR_ENHANCER_BUFFER_FLAG_EOS) { + { + std::unique_lock lockOnBq(renderQueMutex_); + renderBufferAvilMap_.emplace(outputBuffer->memory->GetSeqNum(), outputBuffer); + } + if (cb_) { + cb_->OnOutputBufferAvailable(outputBuffer->memory->GetSeqNum(), AIHDR_ENHANCER_BUFFER_FLAG_EOS); + } + break; + } + Process(inputBuffer, outputBuffer); + } + isProcessing_.store(false); + cvTaskDone_.notify_all(); +} + +void AihdrEnhancerVideoImpl::OnTriggered() +{ + while (true) { + if (!WaitProcessing()) { + break; + } + + DoTask(); + } +} + +int32_t AihdrEnhancerVideoImpl::ReleaseOutputBuffer(uint32_t index, bool render) +{ + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS, VPE_ALGO_ERR_INVALID_STATE, + "ReleaseOutputBuffer failed: not in RUNNING or EOS state"); + + std::unique_lock lockRenderQue(renderQueMutex_); + auto search = renderBufferAvilMap_.find(index); + if (search == renderBufferAvilMap_.end()) { + VPE_LOGE("ReleaseOutputBuffer invalid index %{public}d ", index); + return VPE_ALGO_ERR_INVALID_PARAM; + } + auto buffer = search->second; + renderBufferAvilMap_.erase(search); + lockRenderQue.unlock(); + + if (render) { + flushCfg_.timestamp = buffer->timestamp; + { + std::lock_guard lockSurface(surfaceChangeMutex_); + CHECK_AND_RETURN_RET_LOG(outputSurface_ != nullptr, GSERROR_OK, "outputSurface_ is nullptr"); + auto ret = outputSurface_->FlushBuffer(buffer->memory, -1, flushCfg_); + if (ret != 0) { + VPE_LOGE("ReleaseOutputBuffer flushbuffer err %{public}d ", (int)ret); + return VPE_ALGO_ERR_UNKNOWN; + } + } + std::lock_guard renderLock(renderQueMutex_); + renderBufferMapBak_.emplace(buffer->memory->GetSeqNum(), buffer); + } else { + std::lock_guard renderLock(renderQueMutex_); + 
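+ // render == false: the frame is not sent to the output surface, so the buffer goes straight back to the free output queue.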
outputBufferAvilQue_.push(buffer); + } + return VPE_ALGO_ERR_OK; +} + +int32_t AihdrEnhancerVideoImpl::NotifyEos() +{ + std::lock_guard lock(mutex_); + std::lock_guard lockOnBq(onBqMutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::RUNNING, VPE_ALGO_ERR_INVALID_STATE, + "NotifyEos failed: not in RUNNING state"); + state_ = VPEAlgoState::EOS; + isEos_.store(true); + std::shared_ptr buf = std::make_shared(); + buf->bufferFlag = AIHDR_ENHANCER_BUFFER_FLAG_EOS; + inputBufferAvilQue_.push(buf); + + cvTaskStart_.notify_all(); + + return VPE_ALGO_ERR_OK; +} + +GSError AihdrEnhancerVideoImpl::OnProducerBufferReleased() +{ + { + std::unique_lock lockSurface(surfaceChangeMutex2_); + std::lock_guard outQueLock(renderQueMutex_); + std::shared_ptr buf = std::make_shared(); + CHECK_AND_RETURN_RET_LOG(outputSurface_ != nullptr, GSERROR_OK, "outputSurface_ is nullptr"); + if (renderBufferMapBak_.empty()) { + return GSERROR_OK; + } + GSError err = outputSurface_->RequestBuffer(buf->memory, buf->fence, requestCfg_); + if (err != GSERROR_OK || buf->memory == nullptr) { + VPE_LOGE("RequestBuffer failed, GSError=%{public}d", err); + return err; + } + lockSurface.unlock(); + outputBufferAvilQue_.push(buf); + auto bufSeqNum = buf->memory->GetSeqNum(); + lastSurfaceSequence_ = bufSeqNum; + renderBufferMapBak_.erase(bufSeqNum); + auto it = outputBufferAvilQueBak_.find(bufSeqNum); + if (it == outputBufferAvilQueBak_.end()) { + outputBufferAvilQueBak_.insert(std::make_pair(bufSeqNum, buf)); + auto firstSeqNum = renderBufferMapBak_.begin(); + if (firstSeqNum != renderBufferMapBak_.end()) { + outputBufferAvilQueBak_.erase(firstSeqNum->first); + renderBufferMapBak_.erase(firstSeqNum->first); + } + } + } + + if (state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS) { + cvTaskStart_.notify_all(); + } + + return GSERROR_OK; +} + +GSError AihdrEnhancerVideoImpl::OnConsumerBufferAvailable() +{ + std::lock_guard lock(mutex_); + std::lock_guard lockInQue(onBqMutex_); + CHECK_AND_RETURN_RET_LOG(inputSurface_ != nullptr, GSERROR_OK, "inputSurface is nullptr"); + CHECK_AND_RETURN_RET_LOG(state_ != VPEAlgoState::STOPPED, GSERROR_OK, "state change to stop"); + std::shared_ptr buffer = std::make_shared(); + OHOS::Rect damage; + GSError err = inputSurface_->AcquireBuffer(buffer->memory, buffer->fence, buffer->timestamp, damage); + if (err != GSERROR_OK || buffer->memory == nullptr) { + VPE_LOGW("AcquireBuffer failed, GSError=%{public}d", err); + return err; + } + inputBufferAvilQue_.push(buffer); + + if (!getUsage_) { + requestCfg_.usage = (buffer->memory->GetUsage() | requestCfg_.usage); + getUsage_ = true; + requestCfg_.width = buffer->memory->GetWidth(); + requestCfg_.height = buffer->memory->GetHeight(); + requestCfg_.format = buffer->memory->GetFormat(); + InitBuffers(); + } + + if (state_ == VPEAlgoState::RUNNING) { + cvTaskStart_.notify_all(); + } + + return GSERROR_OK; +} + +void AihdrEnhancerBufferConsumerListener::OnBufferAvailable() +{ + if (process_ != nullptr) { + process_->OnConsumerBufferAvailable(); + } +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/aihdr_enhancer_video/include/aihdr_enhancer_video_impl.h b/framework/algorithm/aihdr_enhancer_video/include/aihdr_enhancer_video_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..44bd940b2fad367b26c076bf0a7bcfe9eaab2568 --- /dev/null +++ b/framework/algorithm/aihdr_enhancer_video/include/aihdr_enhancer_video_impl.h @@ -0,0 +1,133 @@ +/* + * Copyright (c) 
2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef AIHDR_ENHANCER_VIDEO_IMPL_H +#define AIHDR_ENHANCER_VIDEO_IMPL_H + +#include +#include +#include +#include +#include + +#include "native_window.h" +#include "surface.h" +#include "sync_fence.h" + +#include "aihdr_enhancer.h" +#include "aihdr_enhancer_video.h" +#include "aihdr_enhancer_video_common.h" +#include "algorithm_video_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class AihdrEnhancerVideoImpl : public AihdrEnhancerVideo { +public: + AihdrEnhancerVideoImpl(); + ~AihdrEnhancerVideoImpl(); + int32_t Init(); + // Northbound (application-facing) interfaces + int32_t SetCallback(const std::shared_ptr &callback) override; + int32_t SetOutputSurface(sptr surface); + sptr CreateInputSurface(); + int32_t SetSurface(const OHNativeWindow* window) override; + int32_t GetSurface(OHNativeWindow** window) override; + int32_t Configure() override; + int32_t Prepare() override; + int32_t Start() override; + int32_t Stop() override; + int32_t Reset() override; + int32_t Release() override; + int32_t NotifyEos() override; + int32_t ReleaseOutputBuffer(uint32_t index, bool render) override; + int32_t Flush() override; + + GSError OnConsumerBufferAvailable(); + GSError OnProducerBufferReleased(); +private: + struct SurfaceBufferWrapper { + public: + SurfaceBufferWrapper() = default; + ~SurfaceBufferWrapper() = default; + + sptr memory{nullptr}; + AihdrEnhancerBufferFlag bufferFlag{AIHDR_ENHANCER_BUFFER_FLAG_NONE}; + sptr fence{nullptr}; + int64_t timestamp; + }; + void InitBuffers(); + bool WaitProcessing(); + bool AcquireInputOutputBuffers( + std::shared_ptr &inputBuffer, std::shared_ptr &outputBuffer); + void DoTask(); + void OnTriggered(); + void Process(std::shared_ptr inputBuffer, std::shared_ptr outputBuffer); + int32_t AttachToNewSurface(sptr newSurface); + int32_t SetOutputSurfaceConfig(sptr surface); + int32_t SetOutputSurfaceRunning(sptr newSurface); + int32_t GetReleaseOutBuffer(); + std::atomic state_{VPEAlgoState::UNINITIALIZED}; + std::shared_ptr cb_{nullptr}; + std::shared_ptr csc_{nullptr}; + std::mutex mutex_; + bool getUsage_{false}; + + // Task-related members + std::mutex mtxTaskDone_; + std::condition_variable cvTaskDone_; + std::shared_ptr taskThread_{nullptr}; + std::condition_variable cvTaskStart_; + std::mutex mtxTaskStart_; + std::atomic isRunning_{false}; + std::atomic isProcessing_{false}; + std::atomic isEos_{false}; + + // Surface-related members + std::queue> outputBufferAvilQue_; + std::queue> inputBufferAvilQue_; + std::queue> renderBufferAvilQue_; + using RenderBufferAvilMapType = std::map>; + RenderBufferAvilMapType renderBufferAvilMap_; + RenderBufferAvilMapType renderBufferMapBak_; + RenderBufferAvilMapType outputBufferAvilQueBak_; + std::mutex onBqMutex_; // input surface buffer + std::mutex renderQueMutex_; // output surface buffer + std::mutex surfaceChangeMutex_; + std::mutex surfaceChangeMutex2_; + sptr inputSurface_{nullptr}; + sptr outputSurface_{nullptr}; + static constexpr size_t
MAX_BUFFER_CNT{5}; + uint32_t outBufferCnt_{MAX_BUFFER_CNT}; + uint32_t inBufferCnt_{MAX_BUFFER_CNT}; + static constexpr size_t MAX_SURFACE_SEQUENCE{std::numeric_limits::max()}; + uint32_t lastSurfaceSequence_{MAX_SURFACE_SEQUENCE}; + BufferRequestConfig requestCfg_{}; + BufferFlushConfig flushCfg_{}; +}; + +class AihdrEnhancerBufferConsumerListener : public OHOS::IBufferConsumerListener { +public: + explicit AihdrEnhancerBufferConsumerListener(AihdrEnhancerVideoImpl *process) : process_(process) {} + void OnBufferAvailable() override; + +private: + AihdrEnhancerVideoImpl *process_; +}; + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // AIHDR_ENHANCER_VIDEO_IMPL_H diff --git a/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp b/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b6c269a7938dfce64b5532fc3834370d5e54200b --- /dev/null +++ b/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp @@ -0,0 +1,358 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include "colorspace_converter_fwk.h" +#include "extension_manager.h" +#include "native_buffer.h" +#include "surface_buffer.h" +#include "vpe_trace.h" +#include "vpe_log.h" +#include "EGL/egl.h" +#include "surface_buffer_info.h" +#include "video_processing_client.h" +#include + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +ColorSpaceConverterFwk::ColorSpaceConverterFwk() +{ + OpenCLInit(); + OpenGLInit(); + Extension::ExtensionManager::GetInstance().IncreaseInstance(); +} + +void ColorSpaceConverterFwk::OpenCLInit() +{ + void *OpenclFoundationHandle = nullptr; + std::string path = "/sys_prod/lib64/VideoProcessingEngine/libaihdr_engine.so"; + auto ret = access(path.c_str(), F_OK); + if (ret != 0) { + VPE_LOGW("access = %d path = %s", ret, path.c_str()); + } else { + constexpr int DEVICE_NAME_LENGTH = 32; // 32 max name length + char deviceName[DEVICE_NAME_LENGTH]; + auto status = SetupOpencl(&OpenclFoundationHandle, "HUAWEI", deviceName); + if (status != static_cast(CL_SUCCESS)) { + VPE_LOGE("%{public}s, Error: setupOpencl status=%{public}d\n", __FUNCTION__, status); + } + OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Connect(); + VPE_LOGI("VPE Framework connect and load SA!"); + OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Disconnect(); + } + context.clContext = reinterpret_cast(OpenclFoundationHandle); +} + +void ColorSpaceConverterFwk::OpenGLInit() +{ + context.glDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY); + if (context.glDisplay == EGL_NO_DISPLAY || eglGetError() != EGL_SUCCESS) { + VPE_LOGE("ColorSpaceConverterFwk Get display failed!"); + } + EGLint major; + EGLint minor; + if (eglInitialize(context.glDisplay, &major, &minor) == EGL_FALSE || eglGetError() != EGL_SUCCESS) { + VPE_LOGE("ColorSpaceConverterFwk 
eglInitialize failed!"); + } +} + +ColorSpaceConverterFwk::ColorSpaceConverterFwk(std::shared_ptr openglContext, + ClContext *opengclContext) +{ + if (opengclContext != nullptr) { + context.clContext = opengclContext; + } + if (openglContext != nullptr) { + if (openglContext->display != EGL_NO_DISPLAY) { + context.glDisplay = openglContext->display; + } + } + OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Connect(); + VPE_LOGI("VPE Framework connect and load SA!"); + OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Disconnect(); + Extension::ExtensionManager::GetInstance().IncreaseInstance(); +} + +ColorSpaceConverterFwk::~ColorSpaceConverterFwk() +{ + impl_ = nullptr; + for (auto it = impls_.begin(); it != impls_.end(); ++it) { + if (it->second) { + it->second->Deinit(); + it->second = nullptr; + } + } + impls_.clear(); + CleanOpencl(context.clContext); + Extension::ExtensionManager::GetInstance().DecreaseInstance(); +} + +VPEAlgoErrCode ColorSpaceConverterFwk::SetParameter(const ColorSpaceConverterParameter ¶meter) +{ + CHECK_AND_RETURN_RET_LOG(parameter.renderIntent == RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC, + VPE_ALGO_ERR_INVALID_VAL, "Absolute colorimetric is the only supported render intent"); + parameter_ = parameter; + VPE_LOGI("ColorSpaceConverterFwk SetParameter Succeed"); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ColorSpaceConverterFwk::GetParameter(ColorSpaceConverterParameter ¶meter) const +{ + CHECK_AND_RETURN_RET_LOG(parameter_ != std::nullopt, VPE_ALGO_ERR_INVALID_VAL, "Parameter is not set"); + + parameter = *parameter_; + VPE_LOGI("ColorSpaceConverterFwk GetParameter Succeed"); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ColorSpaceConverterFwk::Process(const sptr &input, const sptr &output) +{ + CHECK_AND_RETURN_RET_LOG(parameter_ != std::nullopt, VPE_ALGO_ERR_INVALID_VAL, "Parameter is not set"); + CHECK_AND_RETURN_RET_LOG((input != nullptr) && (output != nullptr), VPE_ALGO_ERR_INVALID_VAL, + "Input or output is nullptr"); + VPEAlgoErrCode ret; + + ret = Init(input, output, context); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Init failed, ret: %{public}d", ret); + ret = impl_->SetParameter(*parameter_); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, + "Set parameter failed, ret: %{public}d", ret); + + VPE_SYNC_TRACE; + ret = impl_->Process(input, output); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Process failed, ret: %{public}d", ret); + + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ColorSpaceConverterFwk::ComposeImage(const sptr &inputSdrImage, + const sptr &inputGainmap, const sptr &outputHdrImage, bool legacy) +{ + CHECK_AND_RETURN_RET_LOG(parameter_ != std::nullopt, VPE_ALGO_ERR_INVALID_VAL, "Parameter is not set"); + CHECK_AND_RETURN_RET_LOG((inputSdrImage != nullptr) && (inputGainmap != nullptr) && (outputHdrImage != nullptr), + VPE_ALGO_ERR_INVALID_VAL, "Input or output is nullptr"); + + VPEAlgoErrCode ret = Init(inputSdrImage, outputHdrImage, context); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Init failed, ret: %{public}d", ret); + + ret = impl_->SetParameter(*parameter_); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, + "Set parameter failed, ret: %{public}d", ret); + + VPE_SYNC_TRACE; + ret = impl_->ComposeImage(inputSdrImage, inputGainmap, outputHdrImage, legacy); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_UNKNOWN, "Compose image failed, ret: 
%{public}d", + ret); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ColorSpaceConverterFwk::DecomposeImage(const sptr &inputImage, + const sptr &outputSdrImage, const sptr &outputGainmap) +{ + CHECK_AND_RETURN_RET_LOG(parameter_ != std::nullopt, VPE_ALGO_ERR_INVALID_VAL, "Parameter is not set"); + CHECK_AND_RETURN_RET_LOG((inputImage != nullptr) && (outputSdrImage != nullptr) && (outputGainmap != nullptr), + VPE_ALGO_ERR_INVALID_VAL, "Input or output is nullptr"); + VPEAlgoErrCode ret; + + ret = Init(inputImage, outputSdrImage, context); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Init failed, ret: %{public}d", ret); + + ret = impl_->SetParameter(*parameter_); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, + "Set parameter failed, ret: %{public}d", ret); + + VPE_SYNC_TRACE; + ret = impl_->DecomposeImage(inputImage, outputSdrImage, outputGainmap); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_UNKNOWN, "Decompose image failed, ret: %{public}d", + ret); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ColorSpaceConverterFwk::Init(const sptr &input, const sptr &output, + VPEContext ctx) +{ + CHECK_AND_RETURN_RET_LOG(context.clContext != nullptr || context.glDisplay != EGL_NO_DISPLAY, + VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, "opencl or opengl is not initialized!"); + auto &manager = Extension::ExtensionManager::GetInstance(); + VPE_SYNC_TRACE; + FrameInfo inputInfo(input); + FrameInfo outputInfo(output); + auto currentKey = + std::make_tuple(inputInfo.colorSpace, inputInfo.pixelFormat, outputInfo.colorSpace, outputInfo.pixelFormat); + auto it = impls_.find(currentKey); + if (it != impls_.end()) { + impl_ = it->second; + VPE_LOGD("ColorSpaceConverterFwk::Init find exist impl"); + return VPE_ALGO_ERR_OK; + } + impl_ = manager.CreateColorSpaceConverter(inputInfo, outputInfo, extensionInfo_); + CHECK_AND_RETURN_RET_LOG(impl_ != nullptr, VPE_ALGO_ERR_NOT_IMPLEMENTED, + "Extension create failed, get a empty impl"); + VPEAlgoErrCode ret = impl_->Init(inputInfo, outputInfo, ctx); + CHECK_AND_RETURN_RET_LOG(ret == 0, VPE_ALGO_ERR_EXTENSION_INIT_FAILED, + "Init failed, extension: %{public}s, version: %{public}s", extensionInfo_.name.c_str(), + extensionInfo_.version.c_str()); + VPE_LOGI("Init successfully, extension: %{public}s, version: %{public}s", extensionInfo_.name.c_str(), + extensionInfo_.version.c_str()); + impls_.insert(std::make_pair(currentKey, impl_)); + return VPE_ALGO_ERR_OK; +} + +std::shared_ptr ColorSpaceConverter::Create() +{ + auto p = std::make_shared(); + CHECK_AND_RETURN_RET_LOG(p != nullptr, nullptr, "Create failed, maybe caused of no memory"); + return std::static_pointer_cast(p); +} + +std::shared_ptr ColorSpaceConverter::Create(std::shared_ptr openglContext, + ClContext *opengclContext) +{ + auto p = std::make_shared(openglContext, opengclContext); + CHECK_AND_RETURN_RET_LOG(p != nullptr, nullptr, "Create failed, maybe caused of no memory"); + return std::static_pointer_cast(p); +} + +int32_t ColorSpaceConverterCreate(int32_t* instance) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr, VPE_ALGO_ERR_INVALID_VAL, "invalid instance"); + auto p = ColorSpaceConverter::Create(); + CHECK_AND_RETURN_RET_LOG(p != nullptr, VPE_ALGO_ERR_INVALID_VAL, "cannot create instance"); + Extension::ExtensionManager::InstanceVariableType instanceVar { p }; + int32_t newId = Extension::ExtensionManager::GetInstance().NewInstanceId(instanceVar); + CHECK_AND_RETURN_RET_LOG(newId != -1, VPE_ALGO_ERR_NO_MEMORY, "cannot create more 
instance"); + *instance = newId; + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterProcessImage(int32_t instance, OHNativeWindowBuffer* inputImage, + OHNativeWindowBuffer* outputImage) +{ + CHECK_AND_RETURN_RET_LOG((inputImage != nullptr && outputImage != nullptr), VPE_ALGO_ERR_INVALID_VAL, + "invalid parameters"); + auto someInstance = Extension::ExtensionManager::GetInstance().GetInstance(instance); + CHECK_AND_RETURN_RET_LOG(someInstance != std::nullopt, VPE_ALGO_ERR_INVALID_VAL, "invalid instance"); + + VPEAlgoErrCode ret = VPE_ALGO_ERR_INVALID_VAL; + auto visitFunc = [inputImage, outputImage, &ret](auto&& var) { + using VarType = std::decay_t; + if constexpr (std::is_same_v>) { + OH_NativeBuffer* inputImageNativeBuffer = nullptr; + OH_NativeBuffer* outputImageNativeBuffer = nullptr; + CHECK_AND_RETURN_LOG( + (OH_NativeBuffer_FromNativeWindowBuffer(inputImage, &inputImageNativeBuffer) == GSERROR_OK) && + (OH_NativeBuffer_FromNativeWindowBuffer(outputImage, &outputImageNativeBuffer) == GSERROR_OK), + "invalid input or output image"); + sptr inputImageSurfaceBuffer( + SurfaceBuffer::NativeBufferToSurfaceBuffer(inputImageNativeBuffer)); + sptr outputImageSurfaceBuffer( + SurfaceBuffer::NativeBufferToSurfaceBuffer(outputImageNativeBuffer)); + (void)var->SetParameter({ RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC, std::nullopt }); + ret = var->Process(inputImageSurfaceBuffer, outputImageSurfaceBuffer); + } else { + VPE_LOGE("instance may be miss used"); + } + }; + std::visit(visitFunc, *someInstance); + + return ret; +} + +int32_t ColorSpaceConverterComposeImage(int32_t instance, OHNativeWindowBuffer* inputSdrImage, + OHNativeWindowBuffer* inputGainmap, OHNativeWindowBuffer* outputHdrImage, bool legacy) +{ + CHECK_AND_RETURN_RET_LOG((inputSdrImage != nullptr && inputGainmap != nullptr && outputHdrImage != nullptr), + VPE_ALGO_ERR_INVALID_VAL, "invalid parameters"); + + auto someInstance = Extension::ExtensionManager::GetInstance().GetInstance(instance); + CHECK_AND_RETURN_RET_LOG(someInstance != std::nullopt, VPE_ALGO_ERR_INVALID_VAL, "invalid instance"); + + VPEAlgoErrCode ret = VPE_ALGO_ERR_INVALID_VAL; + auto visitFunc = [inputSdrImage, inputGainmap, outputHdrImage, legacy, &ret](auto&& var) { + using VarType = std::decay_t; + if constexpr (std::is_same_v>) { + OH_NativeBuffer* inputSdrImageNativeBuffer = nullptr; + OH_NativeBuffer* inputGainmapNativeBuffer = nullptr; + OH_NativeBuffer* outputHdrImageNativeBuffer = nullptr; + CHECK_AND_RETURN_LOG( + (OH_NativeBuffer_FromNativeWindowBuffer(inputSdrImage, &inputSdrImageNativeBuffer) == GSERROR_OK) && + (OH_NativeBuffer_FromNativeWindowBuffer(inputGainmap, &inputGainmapNativeBuffer) == GSERROR_OK) && + (OH_NativeBuffer_FromNativeWindowBuffer(outputHdrImage, &outputHdrImageNativeBuffer) == GSERROR_OK), + "invalid input or output image"); + sptr inputSdrImageSurfaceBuffer( + SurfaceBuffer::NativeBufferToSurfaceBuffer(inputSdrImageNativeBuffer)); + sptr inputGainmapSurfaceBuffer( + SurfaceBuffer::NativeBufferToSurfaceBuffer(inputGainmapNativeBuffer)); + sptr outputHdrImageSurfaceBuffer( + SurfaceBuffer::NativeBufferToSurfaceBuffer(outputHdrImageNativeBuffer)); + (void)var->SetParameter({ RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC, std::nullopt }); + ret = var->ComposeImage(inputSdrImageSurfaceBuffer, inputGainmapSurfaceBuffer, outputHdrImageSurfaceBuffer, + legacy); + } else { + VPE_LOGE("instance may be miss used"); + } + }; + std::visit(visitFunc, *someInstance); + + return ret; +} + +int32_t 
ColorSpaceConverterDecomposeImage(int32_t instance, OHNativeWindowBuffer* inputImage, + OHNativeWindowBuffer* outputSdrImage, OHNativeWindowBuffer* outputGainmap) +{ + CHECK_AND_RETURN_RET_LOG((inputImage != nullptr && outputSdrImage != nullptr && outputGainmap != nullptr), + VPE_ALGO_ERR_INVALID_VAL, "invalid parameters"); + auto someInstance = Extension::ExtensionManager::GetInstance().GetInstance(instance); + CHECK_AND_RETURN_RET_LOG(someInstance != std::nullopt, VPE_ALGO_ERR_INVALID_VAL, "invalid instance"); + + VPEAlgoErrCode ret = VPE_ALGO_ERR_INVALID_VAL; + auto visitFunc = [inputImage, outputSdrImage, outputGainmap, &ret](auto&& var) { + using VarType = std::decay_t; + if constexpr (std::is_same_v>) { + OH_NativeBuffer* inputImageNativeBuffer = nullptr; + OH_NativeBuffer* outputSdrImageNativeBuffer = nullptr; + OH_NativeBuffer* outputGainmapNativeBuffer = nullptr; + CHECK_AND_RETURN_LOG( + (OH_NativeBuffer_FromNativeWindowBuffer(inputImage, &inputImageNativeBuffer) == GSERROR_OK) && + (OH_NativeBuffer_FromNativeWindowBuffer(outputSdrImage, &outputSdrImageNativeBuffer) == GSERROR_OK) && + (OH_NativeBuffer_FromNativeWindowBuffer(outputGainmap, &outputGainmapNativeBuffer) == GSERROR_OK), + "invalid input or output image"); + sptr inputImageSurfaceBuffer( + SurfaceBuffer::NativeBufferToSurfaceBuffer(inputImageNativeBuffer)); + sptr outputSdrImageSurfaceBuffer( + SurfaceBuffer::NativeBufferToSurfaceBuffer(outputSdrImageNativeBuffer)); + sptr outputGainmapSurfaceBuffer( + SurfaceBuffer::NativeBufferToSurfaceBuffer(outputGainmapNativeBuffer)); + (void)var->SetParameter({ RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC, std::nullopt }); + ret = var->DecomposeImage(inputImageSurfaceBuffer, outputSdrImageSurfaceBuffer, outputGainmapSurfaceBuffer); + } else { + VPE_LOGE("instance may be miss used"); + } + }; + std::visit(visitFunc, *someInstance); + + return ret; +} + +int32_t ColorSpaceConverterDestroy(int32_t* instance) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr, VPE_ALGO_ERR_INVALID_VAL, "instance is null"); + return Extension::ExtensionManager::GetInstance().RemoveInstanceReference(*instance); +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/colorspace_converter/include/colorspace_converter_base.h b/framework/algorithm/colorspace_converter/include/colorspace_converter_base.h new file mode 100644 index 0000000000000000000000000000000000000000..05333db8dadf9252efff6e1d2858954ad443d304 --- /dev/null +++ b/framework/algorithm/colorspace_converter/include/colorspace_converter_base.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_COLORSPACE_CONVERTER_BASE_H +#define FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_COLORSPACE_CONVERTER_BASE_H + +#include +#include +#include "nocopyable.h" +#include "frame_info.h" +#include "vpe_context.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class ColorSpaceConverterBase : public NoCopyable { +public: + virtual ~ColorSpaceConverterBase() = default; + virtual VPEAlgoErrCode Init(const FrameInfo &inputFrameInfo, const FrameInfo &outputFrameInfo, + VPEContext context) = 0; + virtual VPEAlgoErrCode Deinit() = 0; + virtual VPEAlgoErrCode SetParameter(const ColorSpaceConverterParameter ¶meter) = 0; + virtual VPEAlgoErrCode GetParameter(ColorSpaceConverterParameter ¶meter) = 0; + virtual VPEAlgoErrCode Process(const sptr &input, const sptr &output) = 0; + virtual VPEAlgoErrCode ComposeImage(const sptr &inputSdrImage, + const sptr &inputGainmap, const sptr &outputHdrImage, bool legacy) = 0; + virtual VPEAlgoErrCode DecomposeImage(const sptr &inputImage, + const sptr &outputSdrImage, const sptr &outputGainmap) = 0; +}; + +using ColorSpaceConverterCreator = std::function()>; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_COLORSPACE_CONVERTER_BASE_H diff --git a/framework/algorithm/colorspace_converter/include/colorspace_converter_capability.h b/framework/algorithm/colorspace_converter/include/colorspace_converter_capability.h new file mode 100644 index 0000000000000000000000000000000000000000..cb07b544aedf7bcff0d2ca3e97aa84836b847735 --- /dev/null +++ b/framework/algorithm/colorspace_converter/include/colorspace_converter_capability.h @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_COLORSPACE_CONVERTER_CAPABILITY_H +#define FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_COLORSPACE_CONVERTER_CAPABILITY_H + +#include +#include +#include "frame_info.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +struct ColorSpaceConverterCapability { + ColorSpaceDescription inputColorSpaceDesc; + ColorSpaceDescription outputColorSpaceDesc; + /* + The key is a supported input pixel format, and the value is a supported output pixel formats' vector. + In json format: + { + supportedInputPixelFormat: [supportedOutputPixelFormat, supportedOutputPixelFormat, ...], + ...... + } + + eg. + Supported input pixel formats are nv21, nv12 and rgba. + When the input pixel format is nv21, the supported output pixel format can be nv21, nv12. + When the input pixel format is nv12, the supported output pixel format can be nv21, nv12 and rgba. + When the input pixel format is rgba, the supported output pixel format can be rgba. + So the keys are nv21, nv12 and rgba, the corresponding value is a vector contains the supported output + pixel formats. 
+ In json format: + { + nv21: [nv21, nv12], + nv12: [nv21, nv12, rgba], + rgba: [rgba], + ...... + } + */ + std::map> pixelFormatMap; + uint32_t rank; + int32_t version; +}; + +using ColorSpaceConverterCapabilitiesBuilder = std::function()>; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_COLORSPACE_CONVERTER_CAPABILITY_H diff --git a/framework/algorithm/colorspace_converter/include/colorspace_converter_fwk.h b/framework/algorithm/colorspace_converter/include/colorspace_converter_fwk.h new file mode 100644 index 0000000000000000000000000000000000000000..39885e38997832095fddc7bb36892779bdf5c142 --- /dev/null +++ b/framework/algorithm/colorspace_converter/include/colorspace_converter_fwk.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_FWK_H +#define FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_FWK_H + +#include +#include +#include +#include "colorspace_converter.h" +#include "colorspace_converter_base.h" +#include "extension_base.h" +#include "metadata_generator.h" +#include "metadata_generator_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Process + * Conversion of decoded video frames: + * single-layer HDR image -> SDR image + * SDR image -> single-layer HDR image + * SDR image -> SDR image + * ComposeImage + * dual-layer HDR image -> single-layer HDR image + * DecomposeImage + * single-layer HDR image -> dual-layer HDR image + * SDR image -> dual-layer HDR image + */ +class ColorSpaceConverterFwk : public ColorSpaceConverter { +public: + ColorSpaceConverterFwk(); + ColorSpaceConverterFwk(std::shared_ptr openglContext, + ClContext *opengclContext = nullptr); + ~ColorSpaceConverterFwk(); + VPEAlgoErrCode SetParameter(const ColorSpaceConverterParameter &parameter) override; + VPEAlgoErrCode GetParameter(ColorSpaceConverterParameter &parameter) const override; + VPEAlgoErrCode Process(const sptr &input, const sptr &output) override; + VPEAlgoErrCode ComposeImage(const sptr &inputSdrImage, const sptr &inputGainmap, + const sptr &outputHdrImage, bool legacy) override; + VPEAlgoErrCode DecomposeImage(const sptr &inputImage, const sptr &outputSdrImage, + const sptr &outputGainmap) override; + +private: + VPEAlgoErrCode Init(const sptr &input, const sptr &output, VPEContext context); + void OpenGLInit(); + void OpenCLInit(); + + std::shared_ptr impl_ { nullptr }; + std::optional parameter_ { std::nullopt }; + std::atomic initialized_ { false }; + Extension::ExtensionInfo extensionInfo_; + std::map< + std::tuple, + std::shared_ptr> impls_; + std::tuple + lastFrameInfoKey_; + VPEContext context; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_FWK_H diff --git a/framework/algorithm/colorspace_converter_display/colorspace_converter_display_fwk.cpp b/framework/algorithm/colorspace_converter_display/colorspace_converter_display_fwk.cpp new file mode 100644 index
0000000000000000000000000000000000000000..031a3c4194b6eee8c0a08a4d7a6671074855f8ce --- /dev/null +++ b/framework/algorithm/colorspace_converter_display/colorspace_converter_display_fwk.cpp @@ -0,0 +1,171 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "colorspace_converter_display_fwk.h" +#include "securec.h" +#include "extension_manager.h" +#include "vpe_parse_metadata.h" +#include "vpe_trace.h" +#include "vpe_log.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +ColorSpaceConverterDisplayFwk::ColorSpaceConverterDisplayFwk() +{ + Extension::ExtensionManager::GetInstance().IncreaseInstance(); +} + +ColorSpaceConverterDisplayFwk::~ColorSpaceConverterDisplayFwk() +{ + impl_.clear(); + Extension::ExtensionManager::GetInstance().DecreaseInstance(); +} + +VPEAlgoErrCode ColorSpaceConverterDisplayFwk::Process(const std::shared_ptr &input, + std::shared_ptr &output, const ColorSpaceConverterDisplayParameter ¶meter) +{ + auto ret = Init(); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_INIT_FAILED, "Init failed"); + VPE_LOGD("size of impl %{public}lu", impl_.size()); + CHECK_AND_RETURN_RET_LOG(!impl_.empty(), VPE_ALGO_ERR_NOT_IMPLEMENTED, "Extension is not found"); + + DeserializedDisplayParameter localParameter; + DeserializeDisplayParameter(parameter, localParameter); + + VPE_SYNC_TRACE; + for (const auto& impl : impl_) { + ret = impl->Process(input, output, localParameter); + if (ret == VPE_ALGO_ERR_OK) { + return VPE_ALGO_ERR_OK; + } + } + + VPE_LOGE("The operation is not supported"); + return VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED; +} + +VPEAlgoErrCode ColorSpaceConverterDisplayFwk::Init() +{ + if (initialized_) { + return VPE_ALGO_ERR_OK; + } + auto& manager = Extension::ExtensionManager::GetInstance(); + + VPE_SYNC_TRACE; + impl_ = manager.CreateColorSpaceConverterDisplay(); + CHECK_AND_RETURN_RET_LOG(!impl_.empty(), VPE_ALGO_ERR_NOT_IMPLEMENTED, "Create impl failed"); + + initialized_ = true; + VPE_LOGI("Successed"); + return VPE_ALGO_ERR_OK; +} + +void ColorSpaceConverterDisplayFwk::DeserializeDisplayParameter(const ColorSpaceConverterDisplayParameter& parameter, + DeserializedDisplayParameter& deserialzed) +{ + deserialzed.inputColorSpace = parameter.inputColorSpace; + deserialzed.outputColorSpace = parameter.outputColorSpace; + deserialzed.sdrNits = parameter.sdrNits; + deserialzed.tmoNits = parameter.tmoNits; + deserialzed.currentDisplayNits = parameter.currentDisplayNits; + deserialzed.disableHdrFloatHeadRoom = parameter.disableHdrFloatHeadRoom; + deserialzed.linearMatrix = parameter.layerLinearMatrix; + DeserializeStaticMetadata(parameter, deserialzed); + DeserializeDynamicMetadata(parameter, deserialzed); +} + +void ColorSpaceConverterDisplayFwk::DeserializeStaticMetadata(const ColorSpaceConverterDisplayParameter& parameter, + DeserializedDisplayParameter& deserialzed) +{ + if (parameter.staticMetadata.size() == 0) { + deserialzed.staticMetadata = std::nullopt; + 
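+ // No static metadata was supplied, so the optional stays empty and the memcpy below is skipped.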
return; + } + + HdrStaticMetadata data; + errno_t ret = memcpy_s(&data, sizeof(data), parameter.staticMetadata.data(), parameter.staticMetadata.size()); + if (ret != EOK) { + VPE_LOGE("memcpy_s failed, err = %d\n", ret); + return; + } + deserialzed.staticMetadata = data; +} + +void ColorSpaceConverterDisplayFwk::DeserializeDynamicMetadata(const ColorSpaceConverterDisplayParameter& parameter, + DeserializedDisplayParameter& deserialzed) +{ + if (parameter.dynamicMetadata.size() == 0) { + deserialzed.hdrVividMetadata = std::nullopt; + return; + } + + HdrVividMetadataV1 data; + HwDisplayMeta displayMeta; + (void)PraseMetadataFromArray(parameter.dynamicMetadata, data, displayMeta); + deserialzed.hdrVividMetadata = data; + deserialzed.displayMeta = displayMeta; +} + +std::shared_ptr ColorSpaceConverterDisplay::Create() +{ + auto p = std::make_shared(); + CHECK_AND_RETURN_RET_LOG(p != nullptr, nullptr, "Create failed, maybe caused of no memory"); + return std::static_pointer_cast(p); +} + +struct ColorSpaceConvertDisplayHandleImpl { + std::shared_ptr obj; +}; + +struct VPEShaderEffectHandleImpl { + std::shared_ptr obj; +}; + +ColorSpaceConvertDisplayHandle *ColorSpaceConvertDisplayCreate() +{ + std::shared_ptr impl = std::make_shared(); + CHECK_AND_RETURN_RET_LOG(impl != nullptr, nullptr, "failed to init ColorSpaceConvertDisplayCreate"); + auto handle = new ColorSpaceConvertDisplayHandleImpl; + handle->obj = impl; + return static_cast(handle); +} + +void ColorSpaceConvertDisplayDestroy(ColorSpaceConvertDisplayHandle *handle) +{ + VPE_LOGD("call ColorSpaceConvertDisplayDestroy"); + if (handle != nullptr) { + auto p = static_cast(handle); + delete p; + } +} + +VPEAlgoErrCode ColorSpaceConvertDisplayProcess(ColorSpaceConvertDisplayHandle *handle, VPEShaderEffectHandle *input, + VPEShaderEffectHandle *output, const ColorSpaceConverterDisplayParameter ¶meter) +{ + CHECK_AND_RETURN_RET_LOG(handle != nullptr, VPE_ALGO_ERR_INVALID_PARAM, "Input DisplayHandle in null!"); + VPEAlgoErrCode ret = VPE_ALGO_ERR_OK; + + auto p = static_cast(handle); + auto in = static_cast(input); + auto out = static_cast(output); + ret = p->obj->Process(in->obj, out->obj, parameter); + return ret; +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/colorspace_converter_display/include/colorspace_converter_display_base.h b/framework/algorithm/colorspace_converter_display/include/colorspace_converter_display_base.h new file mode 100644 index 0000000000000000000000000000000000000000..ab5864020af74a3db796e517bec6e44cdea4ec6c --- /dev/null +++ b/framework/algorithm/colorspace_converter_display/include/colorspace_converter_display_base.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_COLORSPACE_CONVERTER_DISPLAY_BASE_H +#define FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_COLORSPACE_CONVERTER_DISPLAY_BASE_H + +#include +#include +#include "nocopyable.h" +#include "effect/shader_effect.h" +#include "algorithm_common.h" +#include "deserialized_display_parameter.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +using namespace HDI::Display::Graphic::Common::V1_0; + +class ColorSpaceConverterDisplayBase : public NoCopyable { +public: + virtual ~ColorSpaceConverterDisplayBase() = default; + virtual VPEAlgoErrCode Process(const std::shared_ptr& input, + std::shared_ptr& output, + const DeserializedDisplayParameter& parameter) = 0; +}; + +using ColorSpaceConverterDisplayCreator = std::function()>; + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_COLORSPACE_CONVERTER_DISPLAY_BASE_H diff --git a/framework/algorithm/colorspace_converter_display/include/colorspace_converter_display_capability.h b/framework/algorithm/colorspace_converter_display/include/colorspace_converter_display_capability.h new file mode 100644 index 0000000000000000000000000000000000000000..9141281cb068cf5ce802b956313b86a62ba65990 --- /dev/null +++ b/framework/algorithm/colorspace_converter_display/include/colorspace_converter_display_capability.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_COLORSPACE_CONVERTER_DISPLAY_CAPABILITY_H +#define FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_COLORSPACE_CONVERTER_DISPLAY_CAPABILITY_H + +#include +#include +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +struct ColorSpaceConverterDisplayCapability { + ColorSpaceDescription inputColorspaceDesc; + ColorSpaceDescription outputColorSpaceDesc; + uint32_t rank; + int32_t version; +}; + +using ColorSpaceConverterDisplayCapabilitiesBuilder = + std::function()>; + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_COLORSPACE_CONVERTER_DISPLAY_CAPABILITY_H diff --git a/framework/algorithm/colorspace_converter_display/include/colorspace_converter_display_fwk.h b/framework/algorithm/colorspace_converter_display/include/colorspace_converter_display_fwk.h new file mode 100644 index 0000000000000000000000000000000000000000..15c24455d0514a157ad62ff31f39c368846d3812 --- /dev/null +++ b/framework/algorithm/colorspace_converter_display/include/colorspace_converter_display_fwk.h @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_COLORSPACE_CONVERTER_DISPLAY_FWK_H +#define FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_COLORSPACE_CONVERTER_DISPLAY_FWK_H + +#include +#include +#include +#include +#include "effect/shader_effect.h" +#include "colorspace_converter_display.h" +#include "colorspace_converter_display_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +class ColorSpaceConverterDisplayFwk : public ColorSpaceConverterDisplay { +public: + ColorSpaceConverterDisplayFwk(); + ~ColorSpaceConverterDisplayFwk(); + VPEAlgoErrCode Process(const std::shared_ptr& input, + std::shared_ptr& output, + const ColorSpaceConverterDisplayParameter& parameter) override; +private: + VPEAlgoErrCode Init(); + void DeserializeDisplayParameter(const ColorSpaceConverterDisplayParameter& parameter, + DeserializedDisplayParameter& deserialzed); + void DeserializeStaticMetadata(const ColorSpaceConverterDisplayParameter& parameter, + DeserializedDisplayParameter& deserialzed); + void DeserializeDynamicMetadata(const ColorSpaceConverterDisplayParameter& parameter, + DeserializedDisplayParameter& deserialzed); + + std::set> impl_; + std::atomic initialized_ {false}; +}; + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_COLORSPACE_CONVERTER_DISPLAY_FWK_H diff --git a/framework/algorithm/colorspace_converter_display/include/deserialized_display_parameter.h b/framework/algorithm/colorspace_converter_display/include/deserialized_display_parameter.h new file mode 100644 index 0000000000000000000000000000000000000000..9142ea4bd0fbb6abcd4fdce9052c589ee6bdfb13 --- /dev/null +++ b/framework/algorithm/colorspace_converter_display/include/deserialized_display_parameter.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef VPE_FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_DESERIALIZED_DISPLAY_PARAMETER_H
+#define VPE_FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_DESERIALIZED_DISPLAY_PARAMETER_H
+
+#include
+#include "v1_0/hdr_static_metadata.h"
+#include "algorithm_common.h"
+#include "hdr_vivid_metadata_v1.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+
+using namespace HDI::Display::Graphic::Common::V1_0;
+
+struct DeserializedDisplayParameter {
+    ColorSpaceDescription inputColorSpace; // Input color space information
+    ColorSpaceDescription outputColorSpace; // Output color space information
+    std::optional staticMetadata; // Static metadata
+    std::optional hdrVividMetadata; // Dynamic (HDR Vivid) metadata
+    float sdrNits; // SDR brightness in nits
+    float tmoNits; // TMO target brightness in nits
+    float currentDisplayNits; // Current display brightness; its ratio to tmoNits gives the SDR dimming factor
+    bool disableHdrFloatHeadRoom; // Do not use the HDR FP16 headroom path for extra brightening
+    HwDisplayMeta displayMeta; // Capture-display collaboration metadata
+    int32_t width; // Width
+    int32_t height; // Height
+    std::vector linearMatrix; // Linear-domain conversion matrix, applied to the source gamut
+};
+
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
+
+#endif // VPE_FRAMEWORK_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_DESERIALIZED_DISPLAY_PARAMETER_H
diff --git a/framework/algorithm/colorspace_converter_video/colorspace_converter_video_impl.cpp b/framework/algorithm/colorspace_converter_video/colorspace_converter_video_impl.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..833bac287656ee9c3c0213cd7d9787b94ef298dc
--- /dev/null
+++ b/framework/algorithm/colorspace_converter_video/colorspace_converter_video_impl.cpp
@@ -0,0 +1,1106 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#include "colorspace_converter_video_impl.h" +#include +#include +#include +#include +#include "vpe_log.h" +#include "algorithm_errors.h" +#include "colorspace_converter_video_description.h" +#include "algorithm_common.h" +#include "v1_0/buffer_handle_meta_key_type.h" +#include "v1_0/cm_color_space.h" +#include "v1_0/hdr_static_metadata.h" +#include "vpe_trace.h" +#include "securec.h" +#include "extension_manager.h" +#include "meta/meta_key.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +std::shared_ptr ColorSpaceConverterVideo::Create() +{ + std::shared_ptr impl = std::make_shared(); + int32_t ret = impl->Init(); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, nullptr, "failed to init ColorSpaceConverterVideoImpl"); + return impl; +} + +std::shared_ptr ColorSpaceConverterVideo::Create(std::shared_ptr openglContext) +{ + std::shared_ptr impl = std::make_shared(); + int32_t ret = impl->Init(openglContext); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, nullptr, "failed to init ColorSpaceConverterVideoImpl"); + return impl; +} + +ColorSpaceConverterVideoImpl::ColorSpaceConverterVideoImpl() +{ + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = 32; // 32 byte alignment + requestCfg_.usage = + BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER | BUFFER_USAGE_HW_TEXTURE; + requestCfg_.format = 0; + requestCfg_.width = 0; + requestCfg_.height = 0; + + flushCfg_.timestamp = 0; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = 0; + flushCfg_.damage.h = 0; +} + +ColorSpaceConverterVideoImpl::~ColorSpaceConverterVideoImpl() +{ + Release(); +} + +int32_t ColorSpaceConverterVideoImpl::Init() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG( + state_ == VPEAlgoState::UNINITIALIZED, VPE_ALGO_ERR_INVALID_STATE, "Init failed: not in UNINITIALIZED state"); + + csc_ = ColorSpaceConverter::Create(); + CHECK_AND_RETURN_RET_LOG(csc_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "ColorSpaceConverter Create failed"); + + isRunning_.store(true); + taskThread_ = std::make_shared(&ColorSpaceConverterVideoImpl::OnTriggered, this); + CHECK_AND_RETURN_RET_LOG(taskThread_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "Fatal: No memory"); + + state_ = VPEAlgoState::INITIALIZED; + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::Init(std::shared_ptr openglContext) +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG( + state_ == VPEAlgoState::UNINITIALIZED, VPE_ALGO_ERR_INVALID_STATE, "Init failed: not in UNINITIALIZED state"); + + csc_ = ColorSpaceConverter::Create(openglContext); + CHECK_AND_RETURN_RET_LOG(csc_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "ColorSpaceConverter Create failed"); + + isRunning_.store(true); + taskThread_ = std::make_shared(&ColorSpaceConverterVideoImpl::OnTriggered, this); + CHECK_AND_RETURN_RET_LOG(taskThread_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "Fatal: No memory"); + + state_ = VPEAlgoState::INITIALIZED; + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::SetCallback(const std::shared_ptr &callback) +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VPE_ALGO_ERR_INVALID_VAL, "Set callback failed: callback is NULL"); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING, + VPE_ALGO_ERR_INVALID_STATE, "SetCallback failed: not in INITIALIZED or CONFIGURING state"); + cb_ = callback; + state_ = VPEAlgoState::CONFIGURING; + return VPE_ALGO_ERR_OK; +} + +int32_t 
ColorSpaceConverterVideoImpl::AttachToNewSurface(sptr newSurface) +{ + std::lock_guard lockrender(renderQueMutex_); + for (auto it = outputBufferAvilQueBak_.begin(); it != outputBufferAvilQueBak_.end(); ++it) { + auto buffer = it->second; + GSError err = newSurface->AttachBufferToQueue(buffer->memory); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "outputbuffer AttachToNewSurface fail"); + } + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::GetReleaseOutBuffer() +{ + std::lock_guard mapLock(renderQueMutex_); + for (RenderBufferAvilMapType::iterator it = renderBufferMapBak_.begin(); it != renderBufferMapBak_.end(); ++it) { + outputBufferAvilQue_.push(it->second); + } + renderBufferMapBak_.clear(); + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::SetOutputSurfaceConfig(sptr surface) +{ + GSError err = surface->RegisterReleaseListener([this](sptr &buffer) { + (void)buffer; + return OnProducerBufferReleased(); + }); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "RegisterReleaseListener fail"); + surface->SetQueueSize(outBufferCnt_); + outputSurface_ = surface; + state_ = VPEAlgoState::CONFIGURING; + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::SetOutputSurfaceRunning(sptr newSurface) +{ + std::lock_guard lockSurface(surfaceChangeMutex_); + std::lock_guard lockSurface2(surfaceChangeMutex2_); + uint64_t oldId = outputSurface_->GetUniqueId(); + uint64_t newId = newSurface->GetUniqueId(); + if (oldId == newId) { + VPE_LOGD("SetOutputSurfaceRunning same surface"); + return VPE_ALGO_ERR_OK; + } + + outputSurface_->UnRegisterReleaseListener(); + outputSurface_->CleanCache(true); + GSError err = newSurface->RegisterReleaseListener([this](sptr &buffer) { + (void)buffer; + return OnProducerBufferReleased(); + }); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "RegisterReleaseListener fail"); + newSurface->SetQueueSize(outBufferCnt_); + newSurface->Connect(); + newSurface->CleanCache(); + GetReleaseOutBuffer(); + int32_t ret = AttachToNewSurface(newSurface); + if (ret != VPE_ALGO_ERR_OK) { + return ret; + } + + GraphicTransformType inTransform; + ScalingMode inScaleMode; + inTransform = inputSurface_->GetTransform(); + outputSurface_ = newSurface; + err = outputSurface_->SetTransform(inTransform); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "SetTransform fail"); + if (lastSurfaceSequence_ != MAX_SURFACE_SEQUENCE) { + err = inputSurface_->GetScalingMode(lastSurfaceSequence_, inScaleMode); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "GetScalingMode fail"); + err = outputSurface_->SetScalingMode(inScaleMode); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "SetScalingMode fail"); + } + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::SetOutputSurface(sptr surface) +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VPE_ALGO_ERR_INVALID_VAL, "surface is nullptr"); + CHECK_AND_RETURN_RET_LOG(surface->IsConsumer() == false, VPE_ALGO_ERR_INVALID_VAL, "surface is not producer"); + if (state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING) { + int32_t ret = SetOutputSurfaceConfig(surface); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_INVALID_STATE, "SetOutputSurface config fail"); + } else if (state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS || state_ == VPEAlgoState::FLUSHED) { + int32_t ret = 
SetOutputSurfaceRunning(surface); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_INVALID_STATE, "SetOutputSurface Running fail"); + } else { + CHECK_AND_RETURN_RET_LOG(false, VPE_ALGO_ERR_INVALID_STATE, "surface state not support SetOutputSurface"); + } + + return VPE_ALGO_ERR_OK; +} + +sptr ColorSpaceConverterVideoImpl::CreateInputSurface() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING, nullptr, + "CreateInputSurface failed: not in INITIALIZED or CONFIGURING state"); + CHECK_AND_RETURN_RET_LOG(inputSurface_ == nullptr, nullptr, "inputSurface already exists"); + + inputSurface_ = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + CHECK_AND_RETURN_RET_LOG(inputSurface_ != nullptr, nullptr, "CreateSurfaceAsConsumer fail"); + sptr listener = new ImageProcessBufferConsumerListener(this); + GSError err = inputSurface_->RegisterConsumerListener(listener); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, nullptr, "RegisterConsumerListener fail"); + + sptr producer = inputSurface_->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + CHECK_AND_RETURN_RET_LOG(producerSurface != nullptr, nullptr, "CreateSurfaceAsProducer fail"); + producerSurface->SetDefaultUsage(BUFFER_USAGE_CPU_READ); + inputSurface_->SetQueueSize(inBufferCnt_); + state_ = VPEAlgoState::CONFIGURING; + + return producerSurface; +} + +int32_t ColorSpaceConverterVideoImpl::ConfigureColorSpace(const Format &format) +{ + int32_t primaries; + if (!format.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, primaries)) { + VPE_LOGE("format should contain colorspace_primaries"); + return VPE_ALGO_ERR_INVALID_VAL; + } + + int32_t transfunc; + if (!format.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, transfunc)) { + VPE_LOGE("format should contain colorspace_trans_func"); + return VPE_ALGO_ERR_INVALID_VAL; + } + + int32_t matrix; + if (!format.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, matrix)) { + VPE_LOGE("format should contain colorspace_matrix"); + return VPE_ALGO_ERR_INVALID_VAL; + } + + int32_t range; + if (!format.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, range)) { + VPE_LOGE("format should contain colorspace_range"); + return VPE_ALGO_ERR_INVALID_VAL; + } + + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, primaries); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, transfunc); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, matrix); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, range); + + CM_ColorSpaceInfo outColorInfo{ + .primaries = static_cast(primaries), + .transfunc = static_cast(transfunc), + .matrix = static_cast(matrix), + .range = static_cast(range)}; + + colorSpaceVec_.resize(sizeof(outColorInfo)); + errno_t ret = memcpy_s(colorSpaceVec_.data(), colorSpaceVec_.size(), &outColorInfo, sizeof(outColorInfo)); + if (ret != EOK) { + VPE_LOGE("memcpy_s failed, err = %d\n", ret); + return VPE_ALGO_ERR_INVALID_VAL; + } + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::Configure(const Format &format) +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING || state_ == + VPEAlgoState::STOPPED, VPE_ALGO_ERR_INVALID_STATE, "Configure failed: not in INITIALIZED or CONFIGURING state"); + GraphicPixelFormat surfacePixelFmt; + if 
(!format.GetIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, *(int *)&surfacePixelFmt)) {
+        VPE_LOGE("format should contain output pixel_format");
+        return VPE_ALGO_ERR_INVALID_VAL;
+    }
+    int32_t outputColorSpace = 0;
+    if (format.GetIntValue(Media::Tag::VIDEO_DECODER_OUTPUT_COLOR_SPACE, outputColorSpace)) {
+        outputFormat_.PutIntValue(Media::Tag::VIDEO_DECODER_OUTPUT_COLOR_SPACE, outputColorSpace);
+    }
+    requestCfg_.format = surfacePixelFmt;
+    format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, int(surfacePixelFmt));
+    // Specify the color space
+    if (format.GetIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, hdrType_)) {
+        format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, hdrType_);
+        hdrVec_.resize(sizeof(hdrType_));
+        errno_t ret = memcpy_s(hdrVec_.data(), hdrVec_.size(), &hdrType_, sizeof(hdrType_));
+        if (ret != EOK) {
+            VPE_LOGE("memcpy_s failed, err = %d\n", ret);
+            return VPE_ALGO_ERR_INVALID_VAL;
+        }
+    }
+
+    if (ConfigureColorSpace(format) != VPE_ALGO_ERR_OK) {
+        return VPE_ALGO_ERR_INVALID_VAL;
+    }
+    int32_t renderIntent;
+    if (format.GetIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, renderIntent)) {
+        format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, renderIntent);
+    }
+    double sdruiBrightnessRatio;
+    if (format.GetDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, sdruiBrightnessRatio)) {
+        format_.PutDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, sdruiBrightnessRatio);
+    }
+
+    ColorSpaceConverterParameter param = {RenderIntent(renderIntent), sdruiBrightnessRatio};
+    int32_t ret = csc_->SetParameter(param);
+    state_ = (ret == VPE_ALGO_ERR_OK ? VPEAlgoState::CONFIGURING : VPEAlgoState::ERROR);
+    return ret;
+}
+
+int32_t ColorSpaceConverterVideoImpl::GetParameter(Format &parameter)
+{
+    std::lock_guard lock(mutex_);
+    CHECK_AND_RETURN_RET_LOG(state_ >= VPEAlgoState::CONFIGURED && state_ < VPEAlgoState::EOS,
+        VPE_ALGO_ERR_INVALID_STATE, "GetParameter failed: not in right state");
+    ColorSpaceConverterParameter param;
+    csc_->GetParameter(param);
+    if (param.sdrUIBrightnessRatio.has_value()) {
+        format_.PutDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, param.sdrUIBrightnessRatio.value());
+    }
+    format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, int(param.renderIntent));
+    parameter = format_;
+    return VPE_ALGO_ERR_OK;
+}
+
+int32_t ColorSpaceConverterVideoImpl::SetParameter(const Format &parameter)
+{
+    std::lock_guard lock(mutex_);
+    CHECK_AND_RETURN_RET_LOG(state_ >= VPEAlgoState::CONFIGURED && state_ <= VPEAlgoState::RUNNING,
+        VPE_ALGO_ERR_INVALID_STATE, "SetParameter failed: not in right state");
+
+    int32_t renderIntent;
+    if (parameter.GetIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, renderIntent)) {
+        format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, renderIntent);
+    }
+    double sdruiBrightnessRatio;
+    if (parameter.GetDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, sdruiBrightnessRatio)) {
+        format_.PutDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, sdruiBrightnessRatio);
+    }
+    ColorSpaceConverterParameter param = {RenderIntent(renderIntent), sdruiBrightnessRatio};
+    int32_t ret = csc_->SetParameter(param);
+    if (ret != VPE_ALGO_ERR_OK) {
+        state_ = VPEAlgoState::ERROR;
+        VPE_LOGE("SetParameter failed");
+        return ret;
+    }
+    return ret;
+}
+
+int32_t ColorSpaceConverterVideoImpl::Prepare()
+{
+    std::lock_guard lock(mutex_);
+    if (state_ == VPEAlgoState::STOPPED) {
+        state_ = VPEAlgoState::CONFIGURED;
+        return
VPE_ALGO_ERR_OK; + } + CHECK_AND_RETURN_RET_LOG( + state_ == VPEAlgoState::CONFIGURING, VPE_ALGO_ERR_INVALID_STATE, "Prepare failed: not in CONFIGURING state"); + CHECK_AND_RETURN_RET_LOG( + cb_ != nullptr && inputSurface_ != nullptr && outputSurface_ != nullptr && format_.GetFormatMap().size() > 0, + VPE_ALGO_ERR_INVALID_OPERATION, "Prepare faled: inputSurface or outputSurface or callback is null"); + + state_ = VPEAlgoState::CONFIGURED; + return VPE_ALGO_ERR_OK; +} + +void GetFormatFromSurfaceBuffer(Format &outputFormat, sptr &buffer) +{ + outputFormat.PutIntValue(Media::Tag::VIDEO_WIDTH, buffer->GetWidth()); + outputFormat.PutIntValue(Media::Tag::VIDEO_HEIGHT, buffer->GetHeight()); + outputFormat.PutIntValue(Media::Tag::VIDEO_PIC_WIDTH, buffer->GetWidth()); + outputFormat.PutIntValue(Media::Tag::VIDEO_PIC_HEIGHT, buffer->GetHeight()); + int32_t stride = buffer->GetStride(); + outputFormat.PutIntValue(Media::Tag::VIDEO_STRIDE, stride); + if (stride <= 0) { + VPE_LOGW("invalid stride %d", stride); + return; + } + OH_NativeBuffer_Planes *planes = nullptr; + GSError err = buffer->GetPlanesInfo(reinterpret_cast(&planes)); + if (err != GSERROR_OK || planes == nullptr) { + VPE_LOGW("get plane info failed, GSError=%{public}d", err); + return; + } + for (uint32_t j = 0; j < planes->planeCount; j++) { + VPE_LOGD("plane[%{public}u]: offset=%{public}" PRIu64 ", rowStride=%{public}u, columnStride=%{public}u", + j, + planes->planes[j].offset, + planes->planes[j].rowStride, + planes->planes[j].columnStride); + } + int32_t sliceHeight = buffer->GetHeight(); + if (planes->planeCount > 1) { + sliceHeight = static_cast(static_cast(planes->planes[1].offset) / stride); + } + outputFormat.PutIntValue(Media::Tag::VIDEO_SLICE_HEIGHT, sliceHeight); +} + +void ColorSpaceConverterVideoImpl::InitBuffers() +{ + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = requestCfg_.width; + flushCfg_.damage.h = requestCfg_.height; + bool firstBuffer = true; + for (uint32_t i = 0; i < outBufferCnt_; ++i) { + std::shared_ptr buffer = std::make_shared(); + GSError err = outputSurface_->RequestBuffer(buffer->memory, buffer->fence, requestCfg_); + if (err != GSERROR_OK || buffer->memory == nullptr) { + VPE_LOGW("RequestBuffer %{public}u failed, GSError=%{public}d", i, err); + continue; + } + outputBufferAvilQue_.push(buffer); + outputBufferAvilQueBak_.insert(std::make_pair(buffer->memory->GetSeqNum(), buffer)); + if (firstBuffer) { + GetFormatFromSurfaceBuffer(outputFormat_, buffer->memory); + firstBuffer = false; + } + } +} + +int32_t ColorSpaceConverterVideoImpl::Start() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG( + (state_ == VPEAlgoState::CONFIGURED || state_ == VPEAlgoState::STOPPED || state_ == VPEAlgoState::FLUSHED), + VPE_ALGO_ERR_INVALID_STATE, + "Start failed: not in CONFIGURED or STOPPED state"); + if (isEos_.load()) { + state_ = VPEAlgoState::EOS; + } else { + state_ = VPEAlgoState::RUNNING; + } + cvTaskStart_.notify_all(); + cb_->OnState(static_cast(state_.load())); + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::Stop() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG( + state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS || state_ == VPEAlgoState::FLUSHED, + VPE_ALGO_ERR_INVALID_STATE, + "Stop failed: not in RUNNING or EOS state"); + + state_ = VPEAlgoState::STOPPED; + if (!isProcessing_) { + cb_->OnState(static_cast(state_.load())); + } + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::Reset() +{ + 
std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG( + state_ != VPEAlgoState::UNINITIALIZED, VPE_ALGO_ERR_INVALID_STATE, "Start failed: not in right state"); + std::unique_lock lockTask(mtxTaskDone_); + state_ = VPEAlgoState::INITIALIZED; + cvTaskDone_.wait(lockTask, [this]() { return isProcessing_.load() == false; }); + + csc_ = ColorSpaceConverter::Create(); + CHECK_AND_RETURN_RET_LOG(csc_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "ColorSpaceConverter Create failed"); + format_ = Format(); + colorSpaceVec_.clear(); + hdrVec_.clear(); + isEos_.store(false); + + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::Release() +{ + std::lock_guard lock(mutex_); + { + std::unique_lock lockTask(mtxTaskDone_); + state_ = VPEAlgoState::UNINITIALIZED; + cvTaskDone_.wait(lockTask, [this]() { return isProcessing_.load() == false; }); + + inputSurface_ = nullptr; + std::unique_lock lockSurface(surfaceChangeMutex_); + std::unique_lock lockSurface2(surfaceChangeMutex2_); + if (outputSurface_ != nullptr) { + outputSurface_->UnRegisterReleaseListener(); + outputSurface_->CleanCache(true); + outputSurface_ = nullptr; + } + lockSurface2.unlock(); + lockSurface.unlock(); + cb_ = nullptr; + csc_ = nullptr; + isRunning_.store(false); + } + if (taskThread_ != nullptr && taskThread_->joinable()) { + cvTaskStart_.notify_all(); + taskThread_->join(); + } + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::Flush() +{ + std::lock_guard lock(mutex_); + { + std::unique_lock lockTask(mtxTaskDone_); + cvTaskDone_.wait(lockTask, [this]() { return isProcessing_.load() == false; }); + } + + { + std::unique_lock lockInQue(onBqMutex_); + std::queue> tempQueue; + inputBufferAvilQue_.swap(tempQueue); + for (; tempQueue.size() != 0;) { + auto buffer = tempQueue.front(); + tempQueue.pop(); + CHECK_AND_RETURN_RET_LOG(buffer && buffer->memory != nullptr, VPE_ALGO_ERR_UNKNOWN, "Invalid memory"); + GSError err = inputSurface_->ReleaseBuffer(buffer->memory, -1); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "Release buffer failed"); + } + } + + std::lock_guard mapLock(renderQueMutex_); + for (auto &[id, buffer] : renderBufferAvilMap_) { + VPE_LOGD("Reclaim buffer %{public}" PRIu64, id); + outputBufferAvilQue_.push(buffer); + } + renderBufferAvilMap_.clear(); + state_ = VPEAlgoState::FLUSHED; + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::GetOutputFormat(Format &format) +{ + int32_t width = 0; + if (outputFormat_.GetIntValue(Media::Tag::VIDEO_WIDTH, width)) { + format.PutIntValue(Media::Tag::VIDEO_WIDTH, width); + format.PutIntValue(Media::Tag::VIDEO_PIC_WIDTH, width); + } + int32_t height = 0; + if (outputFormat_.GetIntValue(Media::Tag::VIDEO_HEIGHT, height)) { + format.PutIntValue(Media::Tag::VIDEO_HEIGHT, height); + format.PutIntValue(Media::Tag::VIDEO_PIC_HEIGHT, height); + } + int32_t stride = 0; + if (outputFormat_.GetIntValue(Media::Tag::VIDEO_STRIDE, stride)) { + format.PutIntValue(Media::Tag::VIDEO_STRIDE, stride); + } + int32_t sliceHeight = 0; + if (outputFormat_.GetIntValue(Media::Tag::VIDEO_SLICE_HEIGHT, sliceHeight)) { + format.PutIntValue(Media::Tag::VIDEO_SLICE_HEIGHT, sliceHeight); + } + int32_t outputColorSpace = 0; + if (outputFormat_.GetIntValue(Media::Tag::VIDEO_DECODER_OUTPUT_COLOR_SPACE, outputColorSpace)) { + format.PutIntValue(Media::Tag::VIDEO_DECODER_OUTPUT_COLOR_SPACE, outputColorSpace); + } + return VPE_ALGO_ERR_OK; +} + +void ColorSpaceConverterVideoImpl::Process( + std::shared_ptr inputBuffer, std::shared_ptr outputBuffer) +{ + 
int32_t ret = 0; + outputBuffer->timestamp = inputBuffer->timestamp; + sptr surfaceInputBuffer = inputBuffer->memory; + sptr surfaceOutputBuffer = outputBuffer->memory; + int32_t currentWidth = surfaceInputBuffer->GetWidth(); + int32_t currentHeight = surfaceInputBuffer->GetHeight(); + if ((currentWidth != surfaceOutputBuffer->GetWidth()) || (currentHeight != surfaceOutputBuffer->GetHeight())) { + requestCfg_.width = currentWidth; + requestCfg_.height = currentHeight; + surfaceOutputBuffer->EraseMetadataKey(ATTRKEY_COLORSPACE_INFO); + surfaceOutputBuffer->EraseMetadataKey(ATTRKEY_HDR_METADATA_TYPE); + surfaceOutputBuffer->Alloc(requestCfg_); + } + if (colorSpaceVec_.size() > 0) { + surfaceOutputBuffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, colorSpaceVec_); + } + if (hdrVec_.size() > 0) { + surfaceOutputBuffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, hdrVec_); + } + if ((currentWidth != lastOutputWidth_) || (currentHeight != lastOutputHeight_)) { + Format outputFormat; + GetFormatFromSurfaceBuffer(outputFormat_, surfaceOutputBuffer); + GetOutputFormat(outputFormat); + cb_->OnOutputFormatChanged(outputFormat); + lastOutputWidth_ = currentWidth; + lastOutputHeight_ = currentHeight; + } + { + VPETrace cscTrace("ColorSpaceConverterVideoImpl::csc_->Process"); + ret = csc_->Process(surfaceInputBuffer, surfaceOutputBuffer); + if (ret != 0 && cb_) { + cb_->OnError(ret); + } + } + inputSurface_->ReleaseBuffer(surfaceInputBuffer, -1); + if (!ret) { + std::unique_lock lockOnBq(renderQueMutex_); + renderBufferAvilMap_.emplace(outputBuffer->memory->GetSeqNum(), outputBuffer); + } else { + std::lock_guard renderLock(renderQueMutex_); + outputBufferAvilQue_.push(outputBuffer); + } + if (!ret && cb_) { + cb_->OnOutputBufferAvailable(surfaceOutputBuffer->GetSeqNum(), outputBuffer->bufferFlag); + } +} + +bool ColorSpaceConverterVideoImpl::WaitProcessing() +{ + if (!isRunning_.load()) { + return false; + } + + { + std::unique_lock lock(mtxTaskStart_); + cvTaskStart_.wait(lock, [this]() { + std::lock_guard inQueueLock(onBqMutex_); + std::lock_guard outQueueLock(renderQueMutex_); + return ((inputBufferAvilQue_.size() > 0 && outputBufferAvilQue_.size() > 0) || !isRunning_.load()); + }); + } + + return true; +} + +bool ColorSpaceConverterVideoImpl::AcquireInputOutputBuffers( + std::shared_ptr &inputBuffer, std::shared_ptr &outputBuffer) +{ + std::lock_guard lockOnBq(onBqMutex_); + std::lock_guard mapLock(renderQueMutex_); + if (inputBufferAvilQue_.size() == 0 || outputBufferAvilQue_.size() == 0) { + if (state_ == VPEAlgoState::STOPPED) { + cb_->OnState(static_cast(state_.load())); + } + return false; + } + inputBuffer = inputBufferAvilQue_.front(); + outputBuffer = outputBufferAvilQue_.front(); + inputBufferAvilQue_.pop(); + outputBufferAvilQue_.pop(); + return inputBuffer && outputBuffer; +} + +void ColorSpaceConverterVideoImpl::DoTask() +{ + std::shared_ptr inputBuffer = nullptr; + std::shared_ptr outputBuffer = nullptr; + while (true) { + std::lock_guard lockTask(mtxTaskDone_); + if (!isRunning_.load()) { + return; + } + isProcessing_.store(true); + + if (!AcquireInputOutputBuffers(inputBuffer, outputBuffer)) { + break; + } + + if (inputBuffer->bufferFlag == CSCV_BUFFER_FLAG_EOS) { + { + std::unique_lock lockOnBq(renderQueMutex_); + renderBufferAvilMap_.emplace(outputBuffer->memory->GetSeqNum(), outputBuffer); + } + + if (cb_) { + cb_->OnOutputBufferAvailable(outputBuffer->memory->GetSeqNum(), CSCV_BUFFER_FLAG_EOS); + } + break; + } + + Process(inputBuffer, outputBuffer); + } + isProcessing_.store(false); + 
cvTaskDone_.notify_all(); +} + +void ColorSpaceConverterVideoImpl::OnTriggered() +{ + while (true) { + if (!WaitProcessing()) { + break; + } + + DoTask(); + } +} + +int32_t ColorSpaceConverterVideoImpl::ReleaseOutputBuffer(uint32_t index, bool render) +{ + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS, + VPE_ALGO_ERR_INVALID_STATE, "ReleaseOutputBuffer failed: not in RUNNING or EOS state"); + + std::unique_lock lockRenderQue(renderQueMutex_); + auto search = renderBufferAvilMap_.find(index); + if (search == renderBufferAvilMap_.end()) { + VPE_LOGE("ReleaseOutputBuffer invalid index %{public}d ", index); + return VPE_ALGO_ERR_INVALID_PARAM; + } + auto buffer = search->second; + renderBufferAvilMap_.erase(search); + lockRenderQue.unlock(); + + if (render) { + flushCfg_.timestamp = buffer->timestamp; + flushCfg_.damage.w = buffer->memory->GetWidth(); + flushCfg_.damage.h = buffer->memory->GetHeight(); + { + std::lock_guard lockSurface(surfaceChangeMutex_); + CHECK_AND_RETURN_RET_LOG(outputSurface_ != nullptr, GSERROR_OK, "outputSurface_ is nullptr"); + auto ret = outputSurface_->FlushBuffer(buffer->memory, -1, flushCfg_); + if (ret != 0) { + VPE_LOGE("ReleaseOutputBuffer flushbuffer err %{public}d ", (int)ret); + return VPE_ALGO_ERR_UNKNOWN; + } + } + std::lock_guard renderLock(renderQueMutex_); + renderBufferMapBak_.emplace(buffer->memory->GetSeqNum(), buffer); + } else { + std::lock_guard renderLock(renderQueMutex_); + outputBufferAvilQue_.push(buffer); + } + + return VPE_ALGO_ERR_OK; +} + +int32_t ColorSpaceConverterVideoImpl::NotifyEos() +{ + std::lock_guard lock(mutex_); + std::lock_guard lockOnBq(onBqMutex_); + CHECK_AND_RETURN_RET_LOG( + state_ == VPEAlgoState::RUNNING, VPE_ALGO_ERR_INVALID_STATE, "NotifyEos failed: not in RUNNING state"); + state_ = VPEAlgoState::EOS; + isEos_.store(true); + std::shared_ptr buf = std::make_shared(); + buf->bufferFlag = CSCV_BUFFER_FLAG_EOS; + inputBufferAvilQue_.push(buf); + + cvTaskStart_.notify_all(); + + return VPE_ALGO_ERR_OK; +} + +GSError ColorSpaceConverterVideoImpl::OnProducerBufferReleased() +{ + { + std::unique_lock lockSurface(surfaceChangeMutex2_); + std::lock_guard outQueLock(renderQueMutex_); + std::shared_ptr buf = std::make_shared(); + CHECK_AND_RETURN_RET_LOG(outputSurface_ != nullptr, GSERROR_OK, "outputSurface_ is nullptr"); + if (renderBufferMapBak_.empty()) { + return GSERROR_OK; + } + GSError err = outputSurface_->RequestBuffer(buf->memory, buf->fence, requestCfg_); + if (err != GSERROR_OK || buf->memory == nullptr) { + VPE_LOGE("RequestBuffer failed, GSError=%{public}d", err); + return err; + } + lockSurface.unlock(); + outputBufferAvilQue_.push(buf); + auto bufSeqNum = buf->memory->GetSeqNum(); + lastSurfaceSequence_ = bufSeqNum; + renderBufferMapBak_.erase(bufSeqNum); + auto it = outputBufferAvilQueBak_.find(bufSeqNum); + if (it == outputBufferAvilQueBak_.end()) { + outputBufferAvilQueBak_.insert(std::make_pair(bufSeqNum, buf)); + auto firstSeqNum = renderBufferMapBak_.begin(); + if (firstSeqNum != renderBufferMapBak_.end()) { + outputBufferAvilQueBak_.erase(firstSeqNum->first); + renderBufferMapBak_.erase(firstSeqNum->first); + } + } + } + if (state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS) { + cvTaskStart_.notify_all(); + } + + return GSERROR_OK; +} + +GSError ColorSpaceConverterVideoImpl::OnConsumerBufferAvailable() +{ + std::lock_guard lock(mutex_); + std::lock_guard lockInQue(onBqMutex_); + CHECK_AND_RETURN_RET_LOG(inputSurface_ != nullptr, GSERROR_OK, 
"inputSurface is nullptr"); + CHECK_AND_RETURN_RET_LOG(state_ != VPEAlgoState::STOPPED, GSERROR_OK, "state change to stop"); + std::shared_ptr buffer = std::make_shared(); + OHOS::Rect damage; + GSError err = inputSurface_->AcquireBuffer(buffer->memory, buffer->fence, buffer->timestamp, damage); + if (err != GSERROR_OK || buffer->memory == nullptr) { + VPE_LOGW("AcquireBuffer failed, GSError=%{public}d", err); + return err; + } + inputBufferAvilQue_.push(buffer); + + if (!getUsage_) { + requestCfg_.usage = (buffer->memory->GetUsage() | requestCfg_.usage); + getUsage_ = true; + requestCfg_.width = buffer->memory->GetWidth(); + requestCfg_.height = buffer->memory->GetHeight(); + InitBuffers(); + } + + if (state_ == VPEAlgoState::RUNNING) { + cvTaskStart_.notify_all(); + } + + return GSERROR_OK; +} + +void ImageProcessBufferConsumerListener::OnBufferAvailable() +{ + if (process_ != nullptr) { + process_->OnConsumerBufferAvailable(); + } +} + +ColorSpaceConverterVideoCallbackImpl::ColorSpaceConverterVideoCallbackImpl(Callback *callback, ArgumentType *userData) + : userData_(userData) +{ + callback_ = std::make_unique(); + if (callback_) { + callback_->onError = nullptr; + callback_->onOutputBufferAvailable = nullptr; + callback_->OnOutputFormatChanged = nullptr; + if (callback) { + callback_->onError = callback->onError; + callback_->onOutputBufferAvailable = callback->onOutputBufferAvailable; + callback_->OnOutputFormatChanged = callback->OnOutputFormatChanged; + } + } +} + +void ColorSpaceConverterVideoCallbackImpl::OnError(int32_t errorCode) +{ + if (callback_ && callback_->onError) { + callback_->onError(errorCode, userData_); + } +} + +void ColorSpaceConverterVideoCallbackImpl::OnState(int32_t state) +{ + (void)state; + return; +} + +void ColorSpaceConverterVideoCallbackImpl::OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) +{ + if (callback_ && callback_->onOutputBufferAvailable) { + CscvBufferFlag flagTemp = static_cast(flag); + callback_->onOutputBufferAvailable(index, flagTemp, userData_); + } +} + +void ColorSpaceConverterVideoCallbackImpl::OnOutputFormatChanged(const Format& format) +{ + if (callback_ && callback_->OnOutputFormatChanged) { + callback_->OnOutputFormatChanged(format, userData_); + } +} + +struct ColorSpaceConvertVideoHandleImpl { + std::shared_ptr obj; +}; + +int32_t ColorSpaceConvertVideoIsColorSpaceConversionSupported(const ArgumentType *input, const ArgumentType *output) +{ + struct CapabilityInfo { + int32_t colorSpaceType; + int32_t metadataType; + int32_t pixelFormat; + }; + VPE_LOGD("Enter"); + CHECK_AND_RETURN_RET_LOG(input != nullptr && output != nullptr, -1, "Input or output is null"); + auto inputInfo = *static_cast(input); + auto outputInfo = *static_cast(output); + FrameInfo inputFrameInfo; + inputFrameInfo.colorSpace.colorSpaceInfo = GetColorSpaceInfo(static_cast(inputInfo.colorSpaceType)); + inputFrameInfo.colorSpace.metadataType = + static_cast(inputInfo.metadataType); + inputFrameInfo.pixelFormat = static_cast(inputInfo.pixelFormat); + FrameInfo outputFrameInfo; + outputFrameInfo.colorSpace.colorSpaceInfo = GetColorSpaceInfo(static_cast(outputInfo.colorSpaceType)); + outputFrameInfo.colorSpace.metadataType = + static_cast(outputInfo.metadataType); + outputFrameInfo.pixelFormat = static_cast(outputInfo.pixelFormat); + VPE_LOGD("Get extension manager"); + auto &manager = Extension::ExtensionManager::GetInstance(); + VPE_LOGD("Query capability"); + manager.IncreaseInstance(); + auto supported = 
manager.IsColorSpaceConversionSupported(inputFrameInfo, outputFrameInfo); + manager.DecreaseInstance(); + VPE_LOGD("Exit"); + return supported ? 0 : -1; +} + +ColorSpaceConvertVideoHandle *ColorSpaceConvertVideoCreate() +{ + std::shared_ptr impl = std::make_shared(); + CHECK_AND_RETURN_RET_LOG(impl != nullptr, nullptr, "failed to init ColorSpaceConverterVideoImpl"); + + int32_t ret = impl->Init(); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, nullptr, "failed to init ColorSpaceConverterVideoImpl"); + + auto handle = new ColorSpaceConvertVideoHandleImpl; + handle->obj = impl; + return static_cast(handle); +} + +void ColorSpaceConvertVideoDestroy(ColorSpaceConvertVideoHandle *handle) +{ + if (handle != nullptr) { + auto p = static_cast(handle); + delete p; + } +} + +int32_t ColorSpaceConvertVideoSetCallback( + ColorSpaceConvertVideoHandle *handle, ArgumentType *callback, ArgumentType *userData) +{ + int32_t ret = -1; + if (handle != nullptr && callback != nullptr) { + auto callbackPtr = static_cast(callback); + auto callbackImpl = std::make_shared(callbackPtr, userData); + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->SetCallback(std::static_pointer_cast(callbackImpl)); + } + return ret; +} + +int32_t ColorSpaceConvertVideoSetOutputSurface(ColorSpaceConvertVideoHandle *handle, ArgumentType *surface) +{ + int32_t ret = -1; + if (handle != nullptr && surface != nullptr) { + auto sf = static_cast *>(surface); + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->SetOutputSurface(*sf); + } + return ret; +} + +int32_t ColorSpaceConvertVideoCreateInputSurface(ColorSpaceConvertVideoHandle *handle, ArgumentType *surface) +{ + if (handle != nullptr) { + auto sf = static_cast *>(surface); + auto handlePtr = static_cast(handle); + *sf = handlePtr->obj->CreateInputSurface(); + return 0; + } + return -1; +} + +int32_t ColorSpaceConvertVideoSetParameter(ColorSpaceConvertVideoHandle *handle, ArgumentType *parameter) +{ + int32_t ret = -1; + if (handle != nullptr) { + Format *p = static_cast(parameter); + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->SetParameter(*p); + } + return ret; +} + +int32_t ColorSpaceConvertVideoGetParameter(ColorSpaceConvertVideoHandle *handle, ArgumentType *parameter) +{ + int32_t ret = -1; + if (handle != nullptr) { + Format *p = static_cast(parameter); + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->GetParameter(*p); + } + return ret; +} + +int32_t ColorSpaceConvertVideoConfigure(ColorSpaceConvertVideoHandle *handle, ArgumentType *configuration) +{ + int32_t ret = -1; + if (handle != nullptr && configuration != nullptr) { + Format *format = static_cast(configuration); + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->Configure(*format); + } + return ret; +} + +int32_t ColorSpaceConvertVideoPrepare(ColorSpaceConvertVideoHandle *handle) +{ + int32_t ret = -1; + if (handle != nullptr) { + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->Prepare(); + } + return ret; +} + +int32_t ColorSpaceConvertVideoStart(ColorSpaceConvertVideoHandle *handle) +{ + int32_t ret = -1; + if (handle != nullptr) { + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->Start(); + } + return ret; +} + +int32_t ColorSpaceConvertVideoStop(ColorSpaceConvertVideoHandle *handle) +{ + int32_t ret = -1; + if (handle != nullptr) { + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->Stop(); + } + return ret; +} + +int32_t ColorSpaceConvertVideoReset(ColorSpaceConvertVideoHandle *handle) +{ + int32_t ret 
= -1; + if (handle != nullptr) { + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->Reset(); + } + return ret; +} + +int32_t ColorSpaceConvertVideoRelease(ColorSpaceConvertVideoHandle *handle) +{ + int32_t ret = -1; + if (handle != nullptr) { + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->Release(); + } + return ret; +} + +int32_t ColorSpaceConvertVideoFlush(ColorSpaceConvertVideoHandle *handle) +{ + int32_t ret = -1; + if (handle != nullptr) { + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->Flush(); + } + return ret; +} + +int32_t ColorSpaceConvertVideoGetOutputFormat(ColorSpaceConvertVideoHandle *handle, ArgumentType* format) +{ + int32_t ret = -1; + if (handle != nullptr && format != nullptr) { + auto handlePtr = static_cast(handle); + auto formatPtr = static_cast(format); + ret = handlePtr->obj->GetOutputFormat(*formatPtr); + } + return ret; +} + +int32_t ColorSpaceConvertVideoNotifyEos(ColorSpaceConvertVideoHandle *handle) +{ + int32_t ret = -1; + if (handle != nullptr) { + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->NotifyEos(); + } + return ret; +} + +int32_t ColorSpaceConvertVideoReleaseOutputBuffer(ColorSpaceConvertVideoHandle *handle, uint32_t index, bool render) +{ + int32_t ret = -1; + if (handle != nullptr) { + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->ReleaseOutputBuffer(index, render); + } + return ret; +} + +int32_t ColorSpaceConvertVideoOnProducerBufferReleased(ColorSpaceConvertVideoHandle *handle) +{ + int32_t ret = -1; + if (handle != nullptr) { + auto handlePtr = static_cast(handle); + ret = handlePtr->obj->OnProducerBufferReleased(); + } + return ret; +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/colorspace_converter_video/include/colorspace_converter_video_impl.h b/framework/algorithm/colorspace_converter_video/include/colorspace_converter_video_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..160a1bc5b86e03f3390eeb41f58fc12425bfe7d2 --- /dev/null +++ b/framework/algorithm/colorspace_converter_video/include/colorspace_converter_video_impl.h @@ -0,0 +1,165 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef COLORSPACE_CONVERTER_VIDEO_IMPL_H
+#define COLORSPACE_CONVERTER_VIDEO_IMPL_H
+
+#include
+#include
+#include
+#include
+#include
+#include "colorspace_converter_video.h"
+#include "meta/format.h"
+#include "surface.h"
+#include "sync_fence.h"
+#include "colorspace_converter_video_common.h"
+#include "colorspace_converter.h"
+#include "algorithm_video_common.h"
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+class ColorSpaceConverterVideoImpl : public ColorSpaceConverterVideo {
+public:
+    ColorSpaceConverterVideoImpl();
+    ~ColorSpaceConverterVideoImpl();
+    int32_t Init();
+    int32_t Init(std::shared_ptr openglContext);
+    int32_t SetCallback(const std::shared_ptr &callback) override;
+    int32_t SetOutputSurface(sptr surface) override;
+    sptr CreateInputSurface() override;
+    int32_t Configure(const Format &format) override;
+    int32_t Prepare() override;
+    int32_t Start() override;
+    int32_t Stop() override;
+    int32_t Reset() override;
+    int32_t Release() override;
+    int32_t NotifyEos() override;
+    int32_t ReleaseOutputBuffer(uint32_t index, bool render) override;
+    int32_t SetParameter(const Format &parameter) override;
+    int32_t GetParameter(Format &parameter) override;
+    int32_t Flush() override;
+    int32_t GetOutputFormat(Format &format) override;
+    GSError OnConsumerBufferAvailable();
+    GSError OnProducerBufferReleased();
+
+private:
+    struct SurfaceBufferWrapper {
+    public:
+        SurfaceBufferWrapper() = default;
+        ~SurfaceBufferWrapper() = default;
+
+        sptr memory{nullptr};
+        CscvBufferFlag bufferFlag{CSCV_BUFFER_FLAG_NONE};
+        sptr fence{nullptr};
+        int64_t timestamp;
+    };
+    void InitBuffers();
+    bool WaitProcessing();
+    bool AcquireInputOutputBuffers(
+        std::shared_ptr &inputBuffer, std::shared_ptr &outputBuffer);
+    void DoTask();
+    void OnTriggered();
+    void Process(std::shared_ptr inputBuffer, std::shared_ptr outputBuffer);
+    int32_t ConfigureColorSpace(const Format &format);
+    int32_t AttachToNewSurface(sptr newSurface);
+    int32_t SetOutputSurfaceConfig(sptr surface);
+    int32_t SetOutputSurfaceRunning(sptr newSurface);
+    int32_t GetReleaseOutBuffer();
+
+    std::atomic state_{VPEAlgoState::UNINITIALIZED};
+    std::shared_ptr cb_{nullptr};
+    std::shared_ptr csc_{nullptr};
+    std::mutex mutex_;
+    Format format_;
+    Format outputFormat_;
+    bool getUsage_{false};
+
+    // task
+    std::mutex mtxTaskDone_;
+    std::condition_variable cvTaskDone_;
+    std::shared_ptr taskThread_{nullptr};
+    std::condition_variable cvTaskStart_;
+    std::mutex mtxTaskStart_;
+    std::atomic isRunning_{false};
+    std::atomic isProcessing_{false};
+    std::atomic isEos_{false};
+
+    // surface
+    std::queue> outputBufferAvilQue_;
+    std::queue> inputBufferAvilQue_;
+    std::queue> renderBufferAvilQue_;
+    using RenderBufferAvilMapType = std::map>;
+    RenderBufferAvilMapType renderBufferAvilMap_;
+    RenderBufferAvilMapType outputBufferAvilQueBak_;
+    RenderBufferAvilMapType renderBufferMapBak_;
+    std::mutex onBqMutex_;      // input surface buffers
+    std::mutex renderQueMutex_; // output surface buffers
+    std::mutex surfaceChangeMutex_;
+    std::mutex surfaceChangeMutex2_;
+    sptr inputSurface_{nullptr};
+    sptr outputSurface_{nullptr};
+    static constexpr size_t MAX_BUFFER_CNT{5};
+    uint32_t outBufferCnt_{MAX_BUFFER_CNT};
+    uint32_t inBufferCnt_{MAX_BUFFER_CNT};
+    static constexpr size_t MAX_SURFACE_SEQUENCE{std::numeric_limits::max()};
+    uint32_t lastSurfaceSequence_{MAX_SURFACE_SEQUENCE};
+    BufferRequestConfig requestCfg_{};
+    BufferFlushConfig flushCfg_{};
+
+    // color space
+    std::vector colorSpaceVec_;
+    int32_t
hdrType_{0}; + std::vector hdrVec_; + int32_t lastOutputWidth_ = 0; + int32_t lastOutputHeight_ = 0; +}; + +class ImageProcessBufferConsumerListener : public OHOS::IBufferConsumerListener { +public: + explicit ImageProcessBufferConsumerListener(ColorSpaceConverterVideoImpl *process) : process_(process) + {} + void OnBufferAvailable() override; + +private: + ColorSpaceConverterVideoImpl *process_; +}; + +class ColorSpaceConverterVideoCallbackImpl : public ColorSpaceConverterVideoCallback { +public: + struct Callback { + using OnErrorCallback = std::function; + using OnOutputBufferAvailableCallback = std::function; + using OnOutputFormatChangedCallback = std::function; + + OnErrorCallback onError; + OnOutputBufferAvailableCallback onOutputBufferAvailable; + OnOutputFormatChangedCallback OnOutputFormatChanged; + }; + + ColorSpaceConverterVideoCallbackImpl(Callback *callback, ArgumentType *userData); + void OnError(int32_t errorCode); + void OnState(int32_t state); + void OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag); + void OnOutputFormatChanged(const Format& format); + +private: + std::unique_ptr callback_{nullptr}; + ArgumentType *userData_{nullptr}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // COLORSPACE_CONVERTER_VIDEO_IMPL_H diff --git a/framework/algorithm/common/algorithm_common.cpp b/framework/algorithm/common/algorithm_common.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c9711ddd8e235fd9174c4206749e6b297c7c540e --- /dev/null +++ b/framework/algorithm/common/algorithm_common.cpp @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "algorithm_common.h" +#include "securec.h" +#include "vpe_log.h" +#include "vpe_context.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +int SetupOpengl(std::shared_ptr &openglHandle) +{ + std::shared_ptr openglContextPtr = std::make_shared(); + openglContextPtr->display = eglGetDisplay(EGL_DEFAULT_DISPLAY); + if (openglContextPtr->display == EGL_NO_DISPLAY || eglGetError() != EGL_SUCCESS) { + VPE_LOGE("Get display failed!"); + return VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED; + } + EGLint major; + EGLint minor; + if (eglInitialize(openglContextPtr->display, &major, &minor) == EGL_FALSE || eglGetError() != EGL_SUCCESS) { + VPE_LOGE("eglInitialize failed!"); + return VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED; + } + openglHandle = openglContextPtr; + return static_cast(VPE_ALGO_ERR_OK); +} + +VPEAlgoErrCode ColorSpaceDescription::Create(const sptr &buffer, ColorSpaceDescription &desc) +{ + CHECK_AND_RETURN_RET_LOG(nullptr != buffer, VPE_ALGO_ERR_INVALID_VAL, "Get an invalid buffer"); + + std::vector vec; + int32_t err = buffer->GetMetadata(ATTRKEY_COLORSPACE_INFO, vec); + CHECK_AND_RETURN_RET_LOG(GSERROR_OK == err, VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, + "Get metadata colorspace info failed, err: %{public}d", err); + CHECK_AND_RETURN_RET_LOG(sizeof(desc.colorSpaceInfo) == (vec.size()), VPE_ALGO_ERR_INVALID_VAL, + "memcpy_s failed, desc.colorSpaceInfo size != vec size;"); + errno_t ret = memcpy_s(&desc.colorSpaceInfo, sizeof(desc.colorSpaceInfo), vec.data(), vec.size()); + if (ret != EOK) { + VPE_LOGE("memcpy_s failed, err = %d\n", ret); + return VPE_ALGO_ERR_INVALID_VAL; + } + + vec.clear(); + err = buffer->GetMetadata(ATTRKEY_HDR_METADATA_TYPE, vec); + CHECK_AND_RETURN_RET_LOG(GSERROR_OK == err, VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, + "Get hdr metadata type failed, err: %{public}d", err); + CHECK_AND_RETURN_RET_LOG(sizeof(desc.metadataType) == (vec.size()), VPE_ALGO_ERR_INVALID_VAL, + "memcpy_s failed, desc.metadataType size != vec size;"); + ret = memcpy_s(&desc.metadataType, sizeof(desc.metadataType), vec.data(), vec.size()); + if (ret != EOK) { + VPE_LOGE("memcpy_s failed, err = %d\n", ret); + return VPE_ALGO_ERR_INVALID_VAL; + } + + return VPE_ALGO_ERR_OK; +} + +uint32_t GetColorSpaceType(const CM_ColorSpaceInfo &colorSpaceInfo) +{ + CHECK_AND_LOG(colorSpaceInfo.primaries <= CM_ColorPrimaries::COLORPRIMARIES_ADOBERGB && + colorSpaceInfo.transfunc <= CM_TransFunc::TRANSFUNC_GAMMA2_4 && + colorSpaceInfo.matrix <= CM_Matrix::MATRIX_BT2100_ICTCP && + colorSpaceInfo.range <= CM_Range::RANGE_EXTEND, + "Invalid colorSpaceInfo, primaries : %{public}d, transfunc: %{public}d, matrix: %{public}d, range: %{public}d", + colorSpaceInfo.primaries, colorSpaceInfo.transfunc, colorSpaceInfo.matrix, colorSpaceInfo.range); + return ((static_cast(colorSpaceInfo.primaries) << COLORPRIMARIES_OFFSET) + + (static_cast(colorSpaceInfo.transfunc) << TRANSFUNC_OFFSET) + + (static_cast(colorSpaceInfo.matrix) << MATRIX_OFFSET) + + (static_cast(colorSpaceInfo.range) << RANGE_OFFSET)); +} + +CM_ColorSpaceInfo GetColorSpaceInfo(const uint32_t colorSpaceType) +{ + CM_ColorSpaceInfo info; + info.primaries = static_cast((colorSpaceType & COLORPRIMARIES_MASK) >> COLORPRIMARIES_OFFSET); + info.transfunc = static_cast((colorSpaceType & TRANSFUNC_MASK) >> TRANSFUNC_OFFSET); + info.matrix = static_cast((colorSpaceType & MATRIX_MASK) >> MATRIX_OFFSET); + info.range = static_cast((colorSpaceType & RANGE_MASK) >> RANGE_OFFSET); + return info; +} + +bool ColorSpaceDescription::operator < (const 
ColorSpaceDescription &desc) const +{ + uint32_t thisColorSpaceType = GetColorSpaceType(colorSpaceInfo); + uint32_t descColorSpaceType = GetColorSpaceType(desc.colorSpaceInfo); + return (thisColorSpaceType < descColorSpaceType || + ((thisColorSpaceType == descColorSpaceType) && (metadataType < desc.metadataType))); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/common/algorithm_utils.cpp b/framework/algorithm/common/algorithm_utils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..50aeaa70fc69eb91f9ded38d1093a9ef2af558e3 --- /dev/null +++ b/framework/algorithm/common/algorithm_utils.cpp @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "algorithm_utils.h" + +#include +#include "vpe_log.h" +using namespace OHOS::Media::VideoProcessingEngine; + +namespace { +const std::unordered_map ERROR_STR_MAP = { + { VPE_ALGO_ERR_OK, VPE_TO_STR(VPE_ALGO_ERR_OK) }, + { VPE_ALGO_ERR_NO_MEMORY, VPE_TO_STR(VPE_ALGO_ERR_NO_MEMORY) }, + { VPE_ALGO_ERR_INVALID_OPERATION, VPE_TO_STR(VPE_ALGO_ERR_INVALID_OPERATION) }, + { VPE_ALGO_ERR_INVALID_VAL, VPE_TO_STR(VPE_ALGO_ERR_INVALID_VAL) }, + { VPE_ALGO_ERR_UNKNOWN, VPE_TO_STR(VPE_ALGO_ERR_UNKNOWN) }, + { VPE_ALGO_ERR_INIT_FAILED, VPE_TO_STR(VPE_ALGO_ERR_INIT_FAILED) }, + { VPE_ALGO_ERR_EXTENSION_NOT_FOUND, VPE_TO_STR(VPE_ALGO_ERR_EXTENSION_NOT_FOUND) }, + { VPE_ALGO_ERR_EXTENSION_INIT_FAILED, VPE_TO_STR(VPE_ALGO_ERR_EXTENSION_INIT_FAILED) }, + { VPE_ALGO_ERR_EXTENSION_PROCESS_FAILED, VPE_TO_STR(VPE_ALGO_ERR_EXTENSION_PROCESS_FAILED) }, + { VPE_ALGO_ERR_NOT_IMPLEMENTED, VPE_TO_STR(VPE_ALGO_ERR_NOT_IMPLEMENTED) }, + { VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, VPE_TO_STR(VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED) }, + { VPE_ALGO_ERR_INVALID_STATE, VPE_TO_STR(VPE_ALGO_ERR_INVALID_STATE) }, + { VPE_ALGO_ERR_EXTEND_START, VPE_TO_STR(VPE_ALGO_ERR_EXTEND_START) }, +}; +const std::unordered_map STATE_STR_MAP = { + { VPEAlgoState::UNINITIALIZED, VPE_TO_STR(VPEAlgoState::UNINITIALIZED) }, + { VPEAlgoState::INITIALIZED, VPE_TO_STR(VPEAlgoState::INITIALIZED) }, + { VPEAlgoState::CONFIGURING, VPE_TO_STR(VPEAlgoState::CONFIGURING) }, + { VPEAlgoState::CONFIGURED, VPE_TO_STR(VPEAlgoState::CONFIGURED) }, + { VPEAlgoState::STOPPED, VPE_TO_STR(VPEAlgoState::STOPPED) }, + { VPEAlgoState::RUNNING, VPE_TO_STR(VPEAlgoState::RUNNING) }, + { VPEAlgoState::EOS, VPE_TO_STR(VPEAlgoState::EOS) }, + { VPEAlgoState::ERROR, VPE_TO_STR(VPEAlgoState::ERROR) }, +}; +} + +std::string AlgorithmUtils::ToString(VPEAlgoErrCode errorCode) +{ + auto it = ERROR_STR_MAP.find(errorCode); + if (it == ERROR_STR_MAP.end()) [[unlikely]] { + VPE_LOGE("Invalid error code:%{public}d", errorCode); + return "Unsupported error:" + std::to_string(static_cast(errorCode)); + } + return it->second; +} + +std::string AlgorithmUtils::ToString(VPEAlgoState state) +{ + auto it = STATE_STR_MAP.find(state); + if (it == STATE_STR_MAP.end()) [[unlikely]] { + 
VPE_LOGE("Invalid state:%{public}d", state); + return "Unsupported state:" + std::to_string(static_cast(state)); + } + return it->second; +} + +bool AlgorithmUtils::CopySurfaceBufferToSurfaceBuffer(const sptr& srcBuffer, + sptr& destBuffer) +{ + CHECK_AND_RETURN_RET_LOG(srcBuffer != nullptr && destBuffer != nullptr, false, + "srcBuffer or destBuffer is nullptr"); + CHECK_AND_RETURN_RET_LOG(srcBuffer->GetFormat() == destBuffer->GetFormat(), false, "buffer format is not same."\ + "input format:%{public}d,output format:%{public}d", srcBuffer->GetFormat(), destBuffer->GetFormat()); + CHECK_AND_RETURN_RET_LOG((srcBuffer->GetStride() == destBuffer->GetStride()) && + (srcBuffer->GetHeight() == destBuffer->GetHeight()) && (srcBuffer->GetWidth() == destBuffer->GetWidth()), + false, "buffer stride and height is not same. input height:%{public}d,output height:%{public}d,"\ + "input width:%{public}d,output width:%{public}d,input stride:%{public}d,output stride:%{public}d", + srcBuffer->GetHeight(), destBuffer->GetHeight(), srcBuffer->GetWidth(), destBuffer->GetWidth(), + srcBuffer->GetStride(), destBuffer->GetStride()); + CHECK_AND_RETURN_RET_LOG(srcBuffer->GetSize() == destBuffer->GetSize(), false, "buffer size is not same."\ + "input size:%{public}u,output size:%{public}u", srcBuffer->GetSize(), destBuffer->GetSize()); + if (memcpy_s(static_cast(destBuffer->GetVirAddr()), destBuffer->GetSize(), + static_cast(srcBuffer->GetVirAddr()), srcBuffer->GetSize()) != EOK) { + VPE_LOGE("Fail to copy surfaceBuffer to surfaceBuffer!"); + return false; + } + std::vector attrInfo{}; + std::vector keys{}; + GSError ret; + if (srcBuffer->ListMetadataKeys(keys) == GSERROR_OK && !keys.empty()) { + for (size_t i = 0; i < keys.size(); i++) { + if (srcBuffer->GetMetadata(keys[i], attrInfo) == GSERROR_OK && !attrInfo.empty()) { + ret = destBuffer->SetMetadata(keys[i], attrInfo); + CHECK_AND_RETURN_RET_LOG(ret == GSERROR_OK, false, "Fail to set metadata."); + } + attrInfo.clear(); + } + } + return true; +} \ No newline at end of file diff --git a/framework/algorithm/common/algorithm_video.cpp b/framework/algorithm/common/algorithm_video.cpp new file mode 100644 index 0000000000000000000000000000000000000000..de4d16b210b34e9a891ec76f0179e54d624d4538 --- /dev/null +++ b/framework/algorithm/common/algorithm_video.cpp @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "algorithm_video.h"
+
+#include
+#include
+
+#include "vpe_log.h"
+
+// NOTE: Add feature algorithm header files here
+// Feature algorithm header files begin
+#include "detail_enhancer_video_fwk.h"
+// Feature algorithm header files end
+
+using namespace OHOS;
+using namespace OHOS::Media::VideoProcessingEngine;
+
+namespace {
+std::unordered_map(void)>> g_creators = {
+    // NOTE: Add feature algorithm creator here
+    // Feature algorithm creator begin
+    { VIDEO_TYPE_DETAIL_ENHANCER, &DetailEnhancerVideoFwk::Create },
+    // Feature algorithm creator end
+};
+}
+
+std::shared_ptr VpeVideo::Create(uint32_t type)
+{
+    auto it = g_creators.find(type);
+    if (it == g_creators.end()) {
+        VPE_LOGE("Unsupported type: 0x%{public}x", type);
+        return nullptr;
+    }
+    return it->second();
+}
+
+VPEAlgoErrCode VpeVideo::RegisterCallback([[maybe_unused]] const std::shared_ptr& callback)
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::SetOutputSurface([[maybe_unused]] const sptr& surface)
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+sptr VpeVideo::GetInputSurface()
+{
+    return nullptr;
+}
+
+VPEAlgoErrCode VpeVideo::SetParameter([[maybe_unused]] const Format& parameter)
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::GetParameter([[maybe_unused]] Format& parameter)
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::Start()
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::Stop()
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::Flush()
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::Enable()
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::Disable()
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::NotifyEos()
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::ReleaseOutputBuffer([[maybe_unused]] uint32_t index, [[maybe_unused]] bool render)
+{
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode VpeVideo::RenderOutputBufferAtTime([[maybe_unused]] uint32_t index,
+    [[maybe_unused]] int64_t renderTimestamp)
+{
+    return VPE_ALGO_ERR_OK;
+}
diff --git a/framework/algorithm/common/algorithm_video_common.cpp b/framework/algorithm/common/algorithm_video_common.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..a26036b6ca9b0a80a17a8c0a9abb08e49d2805be
--- /dev/null
+++ b/framework/algorithm/common/algorithm_video_common.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2025 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#include "algorithm_video_common.h" + +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +void VpeVideoCallback::OnError([[maybe_unused]] VPEAlgoErrCode errorCode) +{ +} + +void VpeVideoCallback::OnState([[maybe_unused]] VPEAlgoState state) +{ +} + +void VpeVideoCallback::OnOutputFormatChanged([[maybe_unused]] const Format& format) +{ +} + +void VpeVideoCallback::OnEffectChange([[maybe_unused]] uint32_t type) +{ +} + +void VpeVideoCallback::OnOutputBufferAvailable([[maybe_unused]] uint32_t index, [[maybe_unused]] VpeBufferFlag flag) +{ +} + +void VpeVideoCallback::OnOutputBufferAvailable(uint32_t index, const VpeBufferInfo& info) +{ + VPE_LOGD("Call OnOutputBufferAvailable(%{public}u,%{public}d)", index, info.flag); + OnOutputBufferAvailable(index, info.flag); +} diff --git a/framework/algorithm/common/algorithm_video_impl.cpp b/framework/algorithm/common/algorithm_video_impl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..95749016175ab542aff6fe77c6841d64352f973d --- /dev/null +++ b/framework/algorithm/common/algorithm_video_impl.cpp @@ -0,0 +1,744 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "algorithm_video_impl.h" + +#include +#include + +#include "vpe_log.h" +#include "vpe_trace.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; +using namespace std::chrono_literals; + +namespace { +constexpr uint32_t BUFFER_QUEUE_SIZE = 5; + +std::string ToString(const sptr& buffer) +{ + if (buffer == nullptr) { + return "null"; + } + std::stringstream stream; + stream << "id:" << buffer->GetSeqNum() << " " << buffer->GetWidth() << "x" << buffer->GetHeight() << + " format:" << buffer->GetFormat() << " usage:0x" << std::hex << buffer->GetUsage(); + return stream.str(); +} + +std::string ToString(const BufferRequestConfig& requestCfg) +{ + std::stringstream stream; + stream << requestCfg.width << "x" << requestCfg.height << " format:" << requestCfg.format << + " usage:0x" << std::hex << requestCfg.usage; + return stream.str(); +} +} // namespace + +VpeVideoImpl::~VpeVideoImpl() +{ + VPE_LOGD("Step in"); + Deinitialize(); +} + +VPEAlgoErrCode VpeVideoImpl::RegisterCallback(const std::shared_ptr& callback) +{ + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VPE_ALGO_ERR_INVALID_VAL, "Invalid input: callback is null!"); + + return ExecuteWhenIdle( + [this, callback]() { + cb_ = callback; + return VPE_ALGO_ERR_OK; + }, "Registration of callbacks during running is not allowed!"); +} + +VPEAlgoErrCode VpeVideoImpl::SetOutputSurface(const sptr& surface) +{ + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VPE_ALGO_ERR_INVALID_VAL, "Invalid input: surface is null!"); + CHECK_AND_RETURN_RET_LOG(!surface->IsConsumer(), VPE_ALGO_ERR_INVALID_VAL, + "Invalid input: surface is NOT producer!"); + CHECK_AND_RETURN_RET_LOG(IsProducerSurfaceValid(surface), VPE_ALGO_ERR_INVALID_VAL, + "Invalid input: surface is invalid!"); + + 
std::lock_guard lock(lock_); + GSError err = surface->RegisterReleaseListener([this](sptr&) { return OnProducerBufferReleased(); }); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "RegisterReleaseListener failed!"); + surface->SetQueueSize(BUFFER_QUEUE_SIZE); + VPE_LOGI("Set output buffer queue size to %{public}u", BUFFER_QUEUE_SIZE); + producer_ = surface; + auto ret = UpdateRequestCfg(surface, requestCfg_); + VPE_LOGD("requestCfg_({ %{public}s })", ToString(requestCfg_).c_str()); + return ret; +} + +sptr VpeVideoImpl::GetInputSurface() +{ + sptr producerSurface; + if (ExecuteWhenIdle([this, &producerSurface]() { + producerSurface = CreateConsumerSurfaceLocked(); + return VPE_ALGO_ERR_OK; + }, "Getting a surface during running is not allowed!") != VPE_ALGO_ERR_OK) { + return nullptr; + } + return producerSurface; +} + +VPEAlgoErrCode VpeVideoImpl::Start() +{ + return ExecuteWhenIdle( + [this]() { + if (consumer_ == nullptr || producer_ == nullptr || cb_ == nullptr) { + VPE_LOGE("The input surface, output surface or callback is NOT ready!"); + return VPE_ALGO_ERR_INVALID_OPERATION; + } + state_ = VPEState::RUNNING; + OnStateLocked(VPEAlgoState::RUNNING); + return VPE_ALGO_ERR_OK; + }, "Already start!"); +} + +VPEAlgoErrCode VpeVideoImpl::Stop() +{ + auto err = ExecuteWhenRunning([this]() { + state_ = VPEState::STOPPING; + cv_.notify_one(); + return VPE_ALGO_ERR_OK; + }, "Already stop!"); + return err; +} + +VPEAlgoErrCode VpeVideoImpl::Flush() +{ + return ExecuteWhenNotIdle( + [this]() { + std::queue tempQueue1; + std::queue tempQueue2; + { + std::lock_guard bufferLock(bufferLock_); + consumerBufferQueue_.swap(tempQueue1); + while (!renderBufferQueue_.empty()) { + producerBufferQueue_.push(renderBufferQueue_.front()); + renderBufferQueue_.pop(); + } + attachBufferQueue_.swap(tempQueue2); + attachBufferIDs_.clear(); + } + ClearConsumerLocked(tempQueue1); + ClearConsumerLocked(tempQueue2); + return VPE_ALGO_ERR_OK; + }, "Flush must be called during running!"); +} + +VPEAlgoErrCode VpeVideoImpl::Enable() +{ + CHECK_AND_RETURN_RET_LOG(!isEnable_.load(), VPE_ALGO_ERR_INVALID_OPERATION, "Already enabled!"); + + std::lock_guard lock(lock_); + isEnable_ = true; + isEnableChange_ = true; + if (producer_ == nullptr) { + return VPE_ALGO_ERR_OK; + } + + auto ret = UpdateRequestCfg(producer_, requestCfg_); + VPE_LOGD("requestCfg_({ %{public}s })", ToString(requestCfg_).c_str()); + return ret; +} + +VPEAlgoErrCode VpeVideoImpl::Disable() +{ + CHECK_AND_RETURN_RET_LOG(isEnable_.load(), VPE_ALGO_ERR_INVALID_OPERATION, "Already disabled!"); + + std::lock_guard lock(lock_); + isEnable_ = false; + isEnableChange_ = true; + VPE_LOGD("requestCfg_({ %{public}s })", ToString(requestCfg_).c_str()); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode VpeVideoImpl::NotifyEos() +{ + return ExecuteWhenNotIdle( + [this]() { + { + std::lock_guard buffferlock(bufferLock_); + SurfaceBufferInfo bufferInfo{}; + bufferInfo.bufferFlag = VPE_BUFFER_FLAG_EOS; + consumerBufferQueue_.push(bufferInfo); + } + cv_.notify_one(); + return VPE_ALGO_ERR_OK; + }, "Notify EOS must be called during running!"); +} + +VPEAlgoErrCode VpeVideoImpl::ReleaseOutputBuffer(uint32_t index, bool render) +{ + return ExecuteWhenNotIdle( + [this, index, render]() { return RenderOutputBufferLocked(index, -1, render); }, + "Release output buffer must be called during running!"); +} + +VPEAlgoErrCode VpeVideoImpl::RenderOutputBufferAtTime(uint32_t index, int64_t renderTimestamp) +{ + return ExecuteWhenNotIdle( + [this, index, 
renderTimestamp]() { return RenderOutputBufferLocked(index, renderTimestamp, true); }, + "Render output buffer must be called during running!"); +} + +bool VpeVideoImpl::IsInitialized() const +{ + return isInitialized_.load(); +} + +VPEAlgoErrCode VpeVideoImpl::Initialize() +{ + std::lock_guard lock(lock_); + if (isInitialized_.load()) { + VPE_LOGD("Already initialize!"); + return VPE_ALGO_ERR_OK; + } + VPE_LOGD("Start to initializing..."); + isRunning_ = true; + worker_ = std::thread([this]() { + while (isRunning_.load()) { + { + std::unique_lock bufferLock(bufferLock_); + if (!cv_.wait_for(bufferLock, 200s, [this] { + return !isRunning_.load() || state_.load() == VPEState::STOPPING || + (!producerBufferQueue_.empty() && !consumerBufferQueue_.empty()); + })) { + VPE_LOGI("Video processing timeout."); + continue; + } + + if (!isRunning_.load()) { + VPE_LOGI("Video processing clear."); + break; + } + if (producerBufferQueue_.empty() || consumerBufferQueue_.empty()) { + CheckSpuriousWakeup(); + continue; + } + } + ProcessBuffers(); + CheckStopping(); + }; + }); + auto errorCode = OnInitialize(); + isInitialized_ = true; + VPE_LOGD("OnInitialize() return %{public}d.", errorCode); + return errorCode; +} + +VPEAlgoErrCode VpeVideoImpl::Deinitialize() +{ + std::lock_guard lock(lock_); + VPE_LOGD("Start to deinitializing..."); + if (!isInitialized_.load()) { + VPE_LOGD("Already deinitialize!"); + return VPE_ALGO_ERR_OK; + } + isInitialized_ = false; + VPEAlgoErrCode errorCode = OnDeinitialize(); + if (state_.load() == VPEState::RUNNING) { + state_ = VPEState::STOPPING; + } + isRunning_ = false; + cv_.notify_one(); + if (worker_.joinable()) { + worker_.join(); + } + CheckStoppingLocked(); + cb_ = nullptr; + ClearBufferQueues(); + if (consumer_ != nullptr) { + consumer_->UnregisterConsumerListener(); + consumer_ = nullptr; + } + if (producer_ != nullptr) { + producer_->UnRegisterReleaseListener(); + producer_->CleanCache(true); + producer_ = nullptr; + } + VPE_LOGD("OnDeinitialize() return %{public}d.", errorCode); + return errorCode; +} + +void VpeVideoImpl::RefreshBuffers() +{ + std::lock_guard lock(lock_); + if (state_.load() != VPEState::RUNNING) { + VPE_LOGD("Skip refreshing during Non-Running."); + return; + } + if (!isEnable_.load()) { + VPE_LOGD("Skip refreshing when it is disabled."); + return; + } + + if (!isBufferQueueReady_) { + VPE_LOGD("Skip refreshing when buffer queue is not ready."); + return; + } + + VPE_LOGD("Clear all buffer queues."); + ClearBufferQueues(); +} + +void VpeVideoImpl::OnOutputFormatChanged(const Format& format) +{ + std::lock_guard lock(lock_); + if (cb_ != nullptr) { + VPE_LOGD("OnOutputFormatChanged()"); + cb_->OnOutputFormatChanged(format); + } +} + +VPEAlgoErrCode VpeVideoImpl::OnInitialize() +{ + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode VpeVideoImpl::OnDeinitialize() +{ + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode VpeVideoImpl::Process([[maybe_unused]] const sptr& sourceImage, + [[maybe_unused]] sptr& destinationImage) +{ + return VPE_ALGO_ERR_OK; +} + +bool VpeVideoImpl::IsProducerSurfaceValid([[maybe_unused]] const sptr& surface) +{ + return true; +} + +VPEAlgoErrCode VpeVideoImpl::UpdateRequestCfg([[maybe_unused]] const sptr& surface, + [[maybe_unused]] BufferRequestConfig& requestCfg) +{ + return VPE_ALGO_ERR_OK; +} + +void VpeVideoImpl::UpdateRequestCfg([[maybe_unused]] const sptr& consumerBuffer, + [[maybe_unused]] BufferRequestConfig& requestCfg) +{ +} + +void VpeVideoImpl::OnErrorLocked(VPEAlgoErrCode errorCode) +{ + if (cb_ != nullptr) { + 
VPE_LOGD("OnError(%{public}d)", errorCode); + cb_->OnError(errorCode); + } +} + +void VpeVideoImpl::OnStateLocked(VPEAlgoState state) +{ + if (cb_ != nullptr) { + VPE_LOGD("OnState(%{public}d)", state); + cb_->OnState(state); + } +} + +void VpeVideoImpl::OnEffectChange(uint32_t type) +{ + if (cb_ != nullptr) { + VPE_LOGD("OnEffectChange(0x%{public}x)", type); + cb_->OnEffectChange(type); + } +} + +void VpeVideoImpl::OnOutputBufferAvailable(uint32_t index, const VpeBufferInfo& info) +{ + if (cb_ != nullptr) { + cb_->OnOutputBufferAvailable(index, info); + } +} + +GSError VpeVideoImpl::OnConsumerBufferAvailable() +{ + std::lock_guard lock(lock_); + if (state_.load() != VPEState::RUNNING) { + VPE_LOGD("NOT running now!"); + return GSERROR_INVALID_OPERATING; + } + + SurfaceBufferInfo bufferInfo{}; + int releaseFence = -1; + OHOS::Rect damage; + GSError err = consumer_->AcquireBuffer(bufferInfo.buffer, releaseFence, bufferInfo.timestamp, damage); + if (err != GSERROR_OK || bufferInfo.buffer == nullptr) { + VPE_LOGE("Failed to acquire buffer!"); + return err; + } + VPE_LOGD("consumer_->AcquireBuffer({ %{public}s })", ToString(bufferInfo.buffer).c_str()); + + { + std::lock_guard buffferlock(bufferLock_); + if (!isBufferQueueReady_) { + isBufferQueueReady_ = true; + requestCfg_.usage = bufferInfo.buffer->GetUsage(); + + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = 32; // 32 bits align + UpdateRequestCfg(bufferInfo.buffer, requestCfg_); + VPE_LOGD("Use requestCfg_({ %{public}s }) to prepare buffers.", ToString(requestCfg_).c_str()); + PrepareBuffers(); + } + consumerBufferQueue_.push(bufferInfo); + } + cv_.notify_one(); + return GSERROR_OK; +} + +GSError VpeVideoImpl::OnProducerBufferReleased() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(consumer_ != nullptr || producer_ != nullptr, GSERROR_OK, + "Input or output surface is null!"); + + { + std::lock_guard bufferLock(bufferLock_); + GSError err = GSERROR_OK; + CHECK_AND_RETURN_RET_LOG(RequestBuffer(err), err, "Failed to request buffer!"); + } + if (state_.load() != VPEState::IDLE) { + cv_.notify_one(); + } + return GSERROR_OK; +} + +VPEAlgoErrCode VpeVideoImpl::RenderOutputBufferLocked(uint32_t index, int64_t renderTimestamp, bool render) +{ + std::unique_lock bufferLock(bufferLock_); + SurfaceBufferInfo bufferInfo{}; + bool isFound = PopBuffer(renderBufferQueue_, index, bufferInfo, [](sptr&) {}); + bufferLock.unlock(); + + CHECK_AND_RETURN_RET_LOG(isFound, VPE_ALGO_ERR_INVALID_PARAM, "Invalid input: index=%{public}u!", index); + if (render) { + BufferFlushConfig flushcfg{}; + flushcfg.damage.w = bufferInfo.buffer->GetWidth(); + flushcfg.damage.h = bufferInfo.buffer->GetHeight(); + flushcfg.timestamp = (renderTimestamp == -1) ? 
bufferInfo.timestamp : renderTimestamp; + auto ret = producer_->FlushBuffer(bufferInfo.buffer, -1, flushcfg); + VPE_LOGD("producer_->FlushBuffer({ %{public}s })=%{public}d", ToString(bufferInfo.buffer).c_str(), ret); + } else { + bufferLock.lock(); + producerBufferQueue_.push(bufferInfo); + } + return VPE_ALGO_ERR_OK; +} + +sptr VpeVideoImpl::CreateConsumerSurfaceLocked() +{ + CHECK_AND_RETURN_RET_LOG(consumer_ == nullptr, nullptr, "input surface already exists!"); + + consumer_ = Surface::CreateSurfaceAsConsumer("VideoProcessingSurface"); + CHECK_AND_RETURN_RET_LOG(consumer_ != nullptr, nullptr, "Failed to create consumer surface!"); + sptr listener = new(std::nothrow) ConsumerListener(shared_from_this()); + CHECK_AND_RETURN_RET_LOG(listener != nullptr, nullptr, "Failed to create consumer surface listener!"); + CHECK_AND_RETURN_RET_LOG(consumer_->RegisterConsumerListener(listener) == GSERROR_OK, nullptr, + "Failed to register consumer surface listener!"); + + sptr producer = consumer_->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + CHECK_AND_RETURN_RET_LOG(producerSurface != nullptr, nullptr, "Failed to create producer surface!"); + producerSurface->SetDefaultUsage(BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | + BUFFER_USAGE_MEM_DMA | BUFFER_USAGE_MEM_MMZ_CACHE | BUFFER_USAGE_HW_COMPOSER); + consumer_->SetQueueSize(BUFFER_QUEUE_SIZE); + VPE_LOGI("Set input buffer queue size to %{public}u", BUFFER_QUEUE_SIZE); + + return producerSurface; +} + +bool VpeVideoImpl::RequestBuffer(GSError& errorCode) +{ + SurfaceBufferInfo bufferInfo{}; + int releaseFence = -1; + errorCode = producer_->RequestBuffer(bufferInfo.buffer, releaseFence, requestCfg_); + if (errorCode != GSERROR_OK || bufferInfo.buffer == nullptr) { + VPE_LOGW("Failed to producer_->RequestBuffer(requestCfg={ %{public}s })", ToString(requestCfg_).c_str()); + return false; + } + producerBufferQueue_.push(bufferInfo); + if (!isEnable_.load()) { + VPE_LOGD("producer_->RequestBuffer({ %{public}s }) and try to release.", ToString(bufferInfo.buffer).c_str()); + auto it = attachBufferIDs_.find(bufferInfo.buffer->GetSeqNum()); + if (it != attachBufferIDs_.end()) { + PopBuffer(attachBufferQueue_, bufferInfo.buffer->GetSeqNum(), bufferInfo, + [this](sptr& buffer) { + CHECK_AND_RETURN_LOG(buffer != nullptr, "Attach buffer is null!"); + attachBufferIDs_.erase(buffer->GetSeqNum()); + auto ret = consumer_->ReleaseBuffer(buffer, -1); + VPE_LOGD("consumer_->ReleaseBuffer({ %{public}s })=%{public}d cache=%{public}zu", + ToString(buffer).c_str(), ret, attachBufferQueue_.size()); + }); + } + } else { + VPE_LOGD("producer_->RequestBuffer({ %{public}s })", ToString(bufferInfo.buffer).c_str()); + if (attachBufferQueue_.empty()) { + return true; + } + bufferInfo = attachBufferQueue_.front(); + attachBufferQueue_.pop(); + if (bufferInfo.buffer != nullptr) { + attachBufferIDs_.erase(bufferInfo.buffer->GetSeqNum()); + auto ret = consumer_->ReleaseBuffer(bufferInfo.buffer, -1); + VPE_LOGD("consumer_->ReleaseBuffer({ %{public}s })=%{public}d cache->%{public}zu", + ToString(bufferInfo.buffer).c_str(), ret, attachBufferQueue_.size()); + } + } + return true; +} + +void VpeVideoImpl::PrepareBuffers() +{ + for (uint32_t i = 0; i < producer_->GetQueueSize(); i++) { + GSError errorCode; + RequestBuffer(errorCode); + VPE_LOGD("<%{public}u> RequestBuffer({ %{public}s })=%{public}d", i, ToString(requestCfg_).c_str(), errorCode); + } +} + +void VpeVideoImpl::ProcessBuffers() +{ + sptr consumer; + { + std::lock_guard lock(lock_); + 
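+        // Snapshot consumer_ while holding lock_ so the processing loop below can run without the
+        // state lock; the buffer queues it drains are protected separately by bufferLock_.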
if (consumer_ == nullptr) { + return; + } + consumer = consumer_; + } + while (state_.load() != VPEState::IDLE) { + SurfaceBufferInfo srcBufferInfo; + SurfaceBufferInfo dstBufferInfo; + { + std::lock_guard bufferLock(bufferLock_); + if (producerBufferQueue_.empty() || consumerBufferQueue_.empty()) { + break; + } + srcBufferInfo = consumerBufferQueue_.front(); + dstBufferInfo = producerBufferQueue_.front(); + if (srcBufferInfo.buffer == nullptr) { + consumerBufferQueue_.pop(); + continue; + } + if (dstBufferInfo.buffer == nullptr) { + producerBufferQueue_.pop(); + continue; + } + consumerBufferQueue_.pop(); + producerBufferQueue_.pop(); + } + if (isEnable_.load()) { + if (!ProcessBuffer(consumer, srcBufferInfo, dstBufferInfo)) { + continue; + } + } else { + BypassBuffer(srcBufferInfo, dstBufferInfo); + } + } +} + +bool VpeVideoImpl::ProcessBuffer(sptr& consumer, SurfaceBufferInfo& srcBufferInfo, + SurfaceBufferInfo& dstBufferInfo) +{ + if (srcBufferInfo.bufferFlag != VPE_BUFFER_FLAG_EOS) { + dstBufferInfo.timestamp = srcBufferInfo.timestamp; + auto errorCode = Process(srcBufferInfo.buffer, dstBufferInfo.buffer); + auto ret = consumer->ReleaseBuffer(srcBufferInfo.buffer, -1); + VPE_LOGD("consumer_->ReleaseBuffer({ %{public}s })=%{public}d", + ToString(srcBufferInfo.buffer).c_str(), ret); + if (errorCode != VPE_ALGO_ERR_OK) { + OnErrorLocked(errorCode); + std::lock_guard bufferLock(bufferLock_); + producerBufferQueue_.push(dstBufferInfo); + VPE_LOGW("Failed to process({ %{public}s },{ %{public}s })=%{public}d", + ToString(srcBufferInfo.buffer).c_str(), ToString(dstBufferInfo.buffer).c_str(), errorCode); + return false; + } + } + OutputBuffer(srcBufferInfo, dstBufferInfo, true, [] {}); + return true; +} + +void VpeVideoImpl::BypassBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo) +{ + { + std::lock_guard lock(lock_); + auto ret1 = producer_->DetachBufferFromQueue(dstBufferInfo.buffer); + auto ret2 = producer_->AttachBufferToQueue(srcBufferInfo.buffer); + SetRequestCfgLocked(srcBufferInfo.buffer); + VPE_LOGD("producer_->DetachBufferFromQueue({ %{public}s })=%{public}d, " + "AttachBufferToQueue({ %{public}s })=%{public}d requestCfg:{ %{public}s }", + ToString(dstBufferInfo.buffer).c_str(), ret1, + ToString(srcBufferInfo.buffer).c_str(), ret2, ToString(requestCfg_).c_str()); + } + OutputBuffer(srcBufferInfo, srcBufferInfo, false, [this, &srcBufferInfo] { + attachBufferIDs_.insert(srcBufferInfo.buffer->GetSeqNum()); + attachBufferQueue_.push(srcBufferInfo); + }); + VPE_LOGD("cache(%{public}s)->%{public}zu/%{public}zu", ToString(srcBufferInfo.buffer).c_str(), + attachBufferIDs_.size(), attachBufferIDs_.size()); +} + +void VpeVideoImpl::OutputBuffer(const SurfaceBufferInfo& bufferInfo, const SurfaceBufferInfo& bufferImage, + bool isProcessed, std::function&& getReadyToRender) +{ + { + std::lock_guard bufferLock(bufferLock_); + renderBufferQueue_.push(bufferImage); + getReadyToRender(); + } + VpeBufferInfo info { + .flag = bufferInfo.bufferFlag, + .presentationTimestamp = bufferInfo.timestamp, + }; + OnOutputBufferAvailable(bufferImage.buffer->GetSeqNum(), info); + if (!isEnableChange_.load()) { + VPE_LOGD("no enable change"); + return; + } + std::lock_guard lock(lock_); + if (isEnable_.load() && !isProcessed) { + VPE_LOGD("No frame is processed after enabling."); + return; + } + OnEffectChange(isEnable_.load() ? 
type_ : 0); + isEnableChange_ = false; +} + +bool VpeVideoImpl::PopBuffer(std::queue& bufferQueue, uint32_t index, SurfaceBufferInfo& bufferInfo, + std::function&)>&& func) +{ + bool isFound = false; + while (!bufferQueue.empty()) { + bufferInfo = bufferQueue.front(); + bufferQueue.pop(); + func(bufferInfo.buffer); + if (bufferInfo.buffer != nullptr && bufferInfo.buffer->GetSeqNum() == index) { + isFound = true; + break; + } + } + return isFound; +} + +void VpeVideoImpl::SetRequestCfgLocked(const sptr& buffer) +{ + requestCfg_.usage = buffer->GetUsage(); + requestCfg_.format = buffer->GetFormat(); + requestCfg_.width = buffer->GetWidth(); + requestCfg_.height = buffer->GetHeight(); +} + +void VpeVideoImpl::CheckSpuriousWakeup() +{ + if (!CheckStopping()) { + VPE_LOGD("Video processing spurious wakeup."); + } +} + +bool VpeVideoImpl::CheckStopping() +{ + std::lock_guard lock(lock_); + return CheckStoppingLocked(); +} + +bool VpeVideoImpl::CheckStoppingLocked() +{ + if (state_.load() == VPEState::STOPPING) { + state_ = VPEState::IDLE; + OnStateLocked(VPEAlgoState::STOPPED); + return true; + } + return false; +} + +void VpeVideoImpl::ClearQueue(std::queue& bufferQueue) +{ + if (bufferQueue.empty()) { + return; + } + bufferQueue = std::queue(); +} + +void VpeVideoImpl::ClearConsumerLocked(std::queue& bufferQueue) +{ + while (!bufferQueue.empty()) { + consumer_->ReleaseBuffer(bufferQueue.front().buffer, -1); + bufferQueue.pop(); + } +} + +void VpeVideoImpl::ClearBufferQueues() +{ + std::queue tempQueue1; + std::queue tempQueue2; + { + std::lock_guard bufferLock(bufferLock_); + isBufferQueueReady_ = false; + consumerBufferQueue_.swap(tempQueue1); + ClearQueue(producerBufferQueue_); + ClearQueue(renderBufferQueue_); + attachBufferQueue_.swap(tempQueue2); + attachBufferIDs_.clear(); + } + ClearConsumerLocked(tempQueue1); + ClearConsumerLocked(tempQueue2); +} + +VPEAlgoErrCode VpeVideoImpl::ExecuteWhenIdle(std::function&& operation, + const std::string& errorMessage) +{ + return ExecuteWithCheck([this] { return state_.load() == VPEState::IDLE; }, std::move(operation), errorMessage); +} + +VPEAlgoErrCode VpeVideoImpl::ExecuteWhenNotIdle(std::function&& operation, + const std::string& errorMessage) +{ + return ExecuteWithCheck([this] { return state_.load() != VPEState::IDLE; }, std::move(operation), errorMessage); +} + +VPEAlgoErrCode VpeVideoImpl::ExecuteWhenRunning(std::function&& operation, + const std::string& errorMessage) +{ + return ExecuteWithCheck([this] { return state_.load() == VPEState::RUNNING; }, std::move(operation), errorMessage); +} + +VPEAlgoErrCode VpeVideoImpl::ExecuteWithCheck(std::function&& checker, + std::function&& operation, const std::string& errorMessage) +{ + std::lock_guard lock(lock_); + if (checker()) { + return operation(); + } + VPE_LOGW("%{public}s", errorMessage.c_str()); + return VPE_ALGO_ERR_INVALID_OPERATION; +} + +void VpeVideoImpl::ConsumerListener::OnBufferAvailable() +{ + if (owner_ == nullptr) { + VPE_LOGE("Video processing is null!"); + return; + } + owner_->OnConsumerBufferAvailable(); +} diff --git a/framework/algorithm/common/frame_info.cpp b/framework/algorithm/common/frame_info.cpp new file mode 100644 index 0000000000000000000000000000000000000000..96911bb16b5030c98c5ddf4c9324021b6cb8c94e --- /dev/null +++ b/framework/algorithm/common/frame_info.cpp @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "frame_info.h" +#include "vpe_log.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +FrameInfo::FrameInfo(const sptr<SurfaceBuffer> &buffer) +{ + CHECK_AND_RETURN_LOG(nullptr != buffer, "Get an invalid buffer"); + width = static_cast<uint32_t>(buffer->GetWidth()); + height = static_cast<uint32_t>(buffer->GetHeight()); + widthStride = static_cast<uint32_t>(buffer->GetStride()); + // 10 bit samples might be stored as 2 bytes, so stride is 2 * (width + padding) + pixelFormat = static_cast<GraphicPixelFormat>(buffer->GetFormat()); + bitDepth = BitDepth::BIT_DEPTH_8; + int numTwo = 2; + int numThree = 3; + int numFour = 4; + OH_NativeBuffer_Planes *planes = nullptr; + if ((pixelFormat == GRAPHIC_PIXEL_FMT_RGBA_8888) || (pixelFormat == GRAPHIC_PIXEL_FMT_BGRA_8888)) { + // RGBA8 has 4 channel per pixel which cost 4*8bit. // GetStride = w * sizeof(uint32) + widthStride = static_cast<uint32_t>(buffer->GetStride() / numFour); + heightStride = buffer->GetSize() / static_cast<uint32_t>(numFour) / widthStride; + } else if ((pixelFormat == GRAPHIC_PIXEL_FMT_YCBCR_420_SP) || (pixelFormat == GRAPHIC_PIXEL_FMT_YCRCB_420_SP) + || (pixelFormat == GRAPHIC_PIXEL_FMT_YCBCR_420_P) || (pixelFormat == GRAPHIC_PIXEL_FMT_YCRCB_420_P)) { + // GetStride = w * sizeof(uint8) // yuv420(totalsize=3/2*h*w) calculate height stride + widthStride = static_cast<uint32_t>(buffer->GetStride()); + heightStride = buffer->GetSize() * static_cast<uint32_t>(numTwo) / + static_cast<uint32_t>(numThree) / widthStride; + if ((buffer->GetPlanesInfo(reinterpret_cast<void**>(&planes)) == OHOS::SURFACE_ERROR_OK) && + (planes != nullptr)) { + if (planes->planeCount > 1) { + heightStride = planes->planes[1].offset / planes->planes[0].columnStride; + } + } + } else if (pixelFormat == GRAPHIC_PIXEL_FMT_RGBA_1010102) { + // RGBA8 has 4 channel // GetStride = w * sizeof(uint8) (Get wByte nums) 4 channel + bitDepth = BitDepth::BIT_DEPTH_10; + widthStride = static_cast<uint32_t>(buffer->GetStride() / numFour); + heightStride = buffer->GetSize() / static_cast<uint32_t>(numFour) / widthStride; + } else if ((pixelFormat == GRAPHIC_PIXEL_FMT_YCBCR_P010) || (pixelFormat == GRAPHIC_PIXEL_FMT_YCRCB_P010)) { + // 2 bit GetStride = w * sizeof(uint8) (Get wByte nums) // yuv420(totalsize=3*h*w) calculate height stride + bitDepth = BitDepth::BIT_DEPTH_10; + widthStride = static_cast<uint32_t>(buffer->GetStride() / numTwo); + heightStride = buffer->GetSize() / static_cast<uint32_t>(numThree) / widthStride; + if ((buffer->GetPlanesInfo(reinterpret_cast<void**>(&planes)) == OHOS::SURFACE_ERROR_OK) && + (planes != nullptr)) { + if (planes->planeCount > 1) { + heightStride = planes->planes[1].offset / planes->planes[0].columnStride; + } + } + } else { + heightStride = height; + } + ColorSpaceDescription::Create(buffer, colorSpace); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/common/image_opencl_wrapper.cpp b/framework/algorithm/common/image_opencl_wrapper.cpp new file mode 100644 index 0000000000000000000000000000000000000000..905acbd1b332cd1f4e78acca68e7d9c5e77edc98 --- /dev/null +++ b/framework/algorithm/common/image_opencl_wrapper.cpp @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#define USE_OPENCL_WRAPPER +#ifdef USE_OPENCL_WRAPPER + +#include "image_opencl_wrapper.h" +#include +#include +#include +#include +#include +#include +#include +#include "vpe_log.h" + +namespace OHOS { +namespace Media { +// default opencl library path +static const std::vector g_opencl_library_paths = { +#if defined(__APPLE__) || defined(__MACOSX) + "libOpenCL.so", "/System/Library/Frameworks/OpenCL.framework/OpenCL" +#else + "/vendor/lib64/chipsetsdk/libGLES_mali.so", + "/system/lib64/libGLES_mali.so", + "libGLES_mali.so", + "/vendor/lib64/chipsetsdk/libhvgr_v200.so", + "/vendor/lib64/chipsetsdk/libEGL_impl.so", +#endif +}; + +static std::mutex g_initMutex; +static bool g_isInit = false; +static bool g_loadSuccess = false; +static void *g_handle{nullptr}; +// load default library path + static bool LoadLibraryFromPath(const std::string &libraryPath, void **handlePtr) + { + CHECK_AND_RETURN_RET_LOG(handlePtr != nullptr, false, "handlePtr null!"); + + char path[PATH_MAX] = ""; + if (realpath(libraryPath.c_str(), path) == nullptr) { + return false; + } + if (strcmp(path, "") == 0) { + return false; + } + *handlePtr = dlopen(path, RTLD_NOW | RTLD_LOCAL); + CHECK_AND_RETURN_RET_LOG(*handlePtr != nullptr, false, "*handlePtr null!"); + + // load function ptr use dlopen and dlsym. + clImportMemory = reinterpret_cast(dlsym(*handlePtr, "clImportMemoryARM")); + CHECK_AND_RETURN_RET_LOG(clImportMemory != nullptr, false, "clImportMemory null!"); + + return true; + } + + bool LoadOpenCLLibrary(void **handlePtr) + { + CHECK_AND_RETURN_RET_LOG(handlePtr != nullptr, false, "handlePtr null!"); + auto it = + std::find_if(g_opencl_library_paths.begin(), g_opencl_library_paths.end(), + [&](const std::string &lib_path) { + return OHOS::Media::LoadLibraryFromPath(lib_path, handlePtr); + }); + CHECK_AND_RETURN_RET_LOG(it == g_opencl_library_paths.end(), true, "LoadOpenCLLibrary true!"); + return false; + } + + bool InitOpenCL() + { + std::lock_guard lock(g_initMutex); + CHECK_AND_RETURN_RET_LOG(!g_isInit, g_loadSuccess, "InitOpenCL g_loadSuccess!"); + g_isInit = true; + g_loadSuccess = LoadOpenCLLibrary(&g_handle); + return g_loadSuccess; + } + + void DeInitOpenCL() + { + std::lock_guard lock(g_initMutex); + if (g_handle != nullptr) { + dlclose(g_handle); + g_handle = nullptr; + } + } + + clImportMemoryFunc clImportMemory = nullptr; + +} // namespace Media +} // namespace OHOS + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +// clImportMemory wrapper, use OpenCLWrapper function. 
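+// Illustrative usage sketch (placeholder names: context, bufferFd, bufferSize; assumes the driver
+// exposes the cl_arm_import_memory extension), showing how a dma_buf fd could be wrapped:
+//     cl_int err = CL_SUCCESS;
+//     const cl_import_properties_arm props[] = { CL_IMPORT_TYPE_ARM, CL_IMPORT_TYPE_DMA_BUF_ARM, 0 };
+//     ClImportMemoryParam param = { context, CL_MEM_READ_WRITE, props, &bufferFd, bufferSize, &err };
+//     cl_mem clBuffer = ClImportMemory(param);
+//     // ... use clBuffer, then clReleaseMemObject(clBuffer)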
+cl_mem ClImportMemory(ClImportMemoryParam clImportMemoryParam) +{ + auto status = OHOS::Media::InitOpenCL(); + CHECK_AND_RETURN_RET_LOG(status, nullptr, "clImportMemory InitOpenCL null!"); + auto func = OHOS::Media::clImportMemory; + MS_ASSERT(func != nullptr); + return func(clImportMemoryParam.context, clImportMemoryParam.flags, clImportMemoryParam.properties, + clImportMemoryParam.fd, clImportMemoryParam.size, clImportMemoryParam.errcode_ret); +} +void ClDeInitOpenCL() +{ + OHOS::Media::DeInitOpenCL(); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // USE_OPENCL_WRAPPER diff --git a/framework/algorithm/common/image_openclsetup.cpp b/framework/algorithm/common/image_openclsetup.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8230122b96b8888daf125fb7a451183b4c844da7 --- /dev/null +++ b/framework/algorithm/common/image_openclsetup.cpp @@ -0,0 +1,295 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "image_openclsetup.h" +#include "image_opencl_wrapper.h" +#include +#include +#include "vpe_log.h" + +// OpenCL Init +// Enable MY_DEBUG to support printf in kernel. +#define MY_TUNING +#ifdef MY_DEBUG +static void printf_callback(const char *buffer, size_t len, size_t complete, void *usr_data) +{ + VPE_LOGI("output length: %{public}d\noutput data: \n%{public}s\n", (cl_uint)len, buffer); +} +#endif + +static cl_int InitContext(ClContext *pCtx) +{ + cl_int status = CL_SUCCESS; + // Get platform number + pCtx->numPlatforms = 0; + status = clGetPlatformIDs(0, nullptr, &pCtx->numPlatforms); // ~{DZ4fP9B)~} + CHECK_AND_RETURN_RET_LOG(!((status != CL_SUCCESS) || (pCtx->numPlatforms == 0)), status, + "[GPU]: Fail to get platform IDs1. (clGetPlatformIDs)."); + VPE_LOGI("(clGetPlatformIDs). 
status = %{public}d, numPlatforms = %{public}d\n", status, pCtx->numPlatforms); + + // Allocate tables for platform IDs, devices list header, devices number and device index + cl_uint numValue = pCtx->numPlatforms; + pCtx->platforms = reinterpret_cast(malloc(pCtx->numPlatforms * sizeof(cl_platform_id))); + CHECK_AND_RETURN_RET_LOG(pCtx->platforms != nullptr, CL_INVALID_VALUE, "pCtx platforms null!"); + CHECK_AND_LOG(memset_sp(pCtx->platforms, numValue * sizeof(cl_platform_id), 0, + numValue * sizeof(cl_platform_id)) == 0, "memsetFail"); + pCtx->numDevices = reinterpret_cast(malloc(pCtx->numPlatforms * sizeof(cl_uint))); + CHECK_AND_RETURN_RET_LOG(pCtx->numDevices != nullptr, CL_INVALID_VALUE, "pCtx numDevices null!"); + CHECK_AND_LOG(memset_sp(pCtx->numDevices, numValue * sizeof(cl_uint), 0, numValue * sizeof(cl_uint)) == 0, + "memsetFail"); + pCtx->idxDevices = reinterpret_cast(malloc(pCtx->numPlatforms * sizeof(cl_uint))); + CHECK_AND_RETURN_RET_LOG(pCtx->idxDevices != nullptr, CL_INVALID_VALUE, "pCtx idxDevices null!"); + CHECK_AND_LOG(memset_sp(pCtx->idxDevices, numValue * sizeof(cl_uint), 0, numValue * sizeof(cl_uint)) == 0, + "memsetFail"); + pCtx->devices = reinterpret_cast(malloc(pCtx->numPlatforms * sizeof(cl_device_id *))); + CHECK_AND_RETURN_RET_LOG(pCtx->devices != nullptr, CL_INVALID_VALUE, "pCtx devices null!"); + CHECK_AND_LOG(memset_sp(pCtx->devices, numValue * sizeof(cl_device_id *), 0, + numValue * sizeof(cl_device_id *)) == 0, "memsetFail"); + for (cl_uint i = 0; i < pCtx->numPlatforms; i++) { + pCtx->devices[i] = nullptr; + } + return status; +} + +static cl_int GetDevice(ClContext *pCtx) +{ + cl_int status = CL_SUCCESS; + // Get platforms + status = clGetPlatformIDs(pCtx->numPlatforms, pCtx->platforms, nullptr); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, + "[GPU]: Fail to get platform IDs2. (clGetPlatformIDs)."); + + // Get all devices for each of platform + for (cl_uint i = 0; i < pCtx->numPlatforms; i++) { + cl_uint numDevices = 0; + status = clGetDeviceIDs(pCtx->platforms[i], CL_DEVICE_TYPE_ALL, 0, nullptr, &numDevices); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, + "[GPU]: Fail to get device IDs3. (clGetDeviceIDs)."); + + pCtx->numDevices[i] = numDevices; + pCtx->idxDevices[i] = (cl_uint)(-1); + + if (numDevices == 0) { + pCtx->devices[i] = nullptr; + continue; + } + + pCtx->devices[i] = reinterpret_cast(malloc(numDevices * sizeof(cl_device_id))); + CHECK_AND_RETURN_RET_LOG(pCtx->devices[i] != nullptr, -1, "pCtx devices[i] null!"); + + status = clGetDeviceIDs(pCtx->platforms[i], CL_DEVICE_TYPE_ALL, numDevices, pCtx->devices[i], nullptr); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, + "[GPU]: Fail to get device IDs4. (clGetDeviceIDs)."); + } + return status; +} + +static cl_int ChoosePlatform(ClContext *pCtx, cl_uchar infoBuf[], size_t lenInfoBuf, const char *vendorName) +{ + cl_int status = CL_SUCCESS; + CHECK_AND_RETURN_RET_LOG(lenInfoBuf <= INFO_BUFFER_LENGTH, CL_DEVICE_NOT_FOUND, "lenInfoBuf>128!"); + // Choose vendorName platform to work + pCtx->idxPlatforms = (cl_uint)(-1); + if (pCtx->numPlatforms == 1) { + pCtx->idxPlatforms = 0; + } else { + for (cl_uint i = 0; i < pCtx->numPlatforms; i++) { + // Get vendor name + status = clGetPlatformInfo(pCtx->platforms[i], CL_PLATFORM_VENDOR, INFO_BUFFER_LENGTH, infoBuf, nullptr); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, + "[GPU]: Fail to get platform Info. (clGetPlatformInfo). 
"); + if (!strcmp(static_cast(static_cast(infoBuf)), vendorName)) { + pCtx->idxPlatforms = i; + break; + } + } + } + + // exit to host program if no vendorName is found. + if ((cl_uint)(-1) == pCtx->idxPlatforms) { + VPE_LOGE("[GPU]: There is no platform to use. exit.\n"); + status = CL_DEVICE_NOT_AVAILABLE; + return status; + } + return status; +} + +static cl_int ChooseDevice(ClContext *pCtx, cl_device_id *targetDevice, cl_uchar infoBuf[], size_t lenInfoBuf, + char *deviceName) +{ + cl_int status = CL_SUCCESS; + CHECK_AND_RETURN_RET_LOG(lenInfoBuf <= INFO_BUFFER_LENGTH, CL_DEVICE_NOT_FOUND, "lenInfoBuf>128!"); + // Choose GPU device to work + pCtx->idxDevices[pCtx->idxPlatforms] = (cl_uint)(-1); + for (cl_uint i = 0; i < pCtx->numDevices[pCtx->idxPlatforms]; i++) { + status = + clGetDeviceInfo(pCtx->devices[pCtx->idxPlatforms][i], CL_DEVICE_TYPE, INFO_BUFFER_LENGTH, infoBuf, nullptr); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, + "[GPU]: Fail to get device Info. (clGetDeviceInfo)."); + { + *targetDevice = pCtx->devices[pCtx->idxPlatforms][i]; + pCtx->idxDevices[pCtx->idxPlatforms] = i; + break; + } + } + if (deviceName != nullptr) { + constexpr int deviceLength = 32; + status = clGetDeviceInfo(pCtx->devices[pCtx->idxPlatforms][pCtx->idxDevices[pCtx->idxPlatforms]], + CL_DEVICE_NAME, + deviceLength, + deviceName, + nullptr); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, + "[GPU]: Fail to get device Info. (clGetDeviceInfo)."); + deviceName[strlen(deviceName)] = '\0'; + } + // exit to host program if no GPU device of vendorName is found. + CHECK_AND_RETURN_RET_LOG(!((cl_uint)(-1) == pCtx->idxDevices[pCtx->idxPlatforms]), CL_DEVICE_NOT_FOUND, + "[GPU]: There is no GPU to use. exit."); + return status; +} + +static cl_int CreateContext(ClContext *pCtx, cl_device_id targetDevice) +{ + cl_int status = CL_SUCCESS; + pCtx->context = clCreateContext( + nullptr, pCtx->numDevices[pCtx->idxPlatforms], pCtx->devices[pCtx->idxPlatforms], nullptr, nullptr, &status); + + CHECK_AND_RETURN_RET_LOG(((status == CL_SUCCESS) || (pCtx->context == nullptr)), status, + "Error: Fail to create CL context."); + + const cl_bitfield props[] = {CL_QUEUE_PRIORITY_KHR, CL_QUEUE_PRIORITY_LOW_KHR, 0}; + pCtx->cmdQueueGPU = clCreateCommandQueueWithProperties(pCtx->context, targetDevice, props, &status); + CHECK_AND_RETURN_RET_LOG(!((status != CL_SUCCESS) || (pCtx->cmdQueueGPU == nullptr)), status, + "Error: Fail to create CL cmdQueueGPU."); + + return status; +} + +// These code need to be modified in the future if there are more platforms and devices +// and need other platform/device select policies. 
+static cl_int InitOpenCL(ClContext *pCtx, const char *vendorName, char *deviceName) +{ + cl_uchar infoBuf[INFO_BUFFER_LENGTH] = {0}; + cl_int status; + cl_device_id targetDevice = (cl_device_id)0; + + status = InitContext(pCtx); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, "InitContext fail!"); + + status = GetDevice(pCtx); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, "GetDevice fail!"); + + status = ChoosePlatform(pCtx, infoBuf, sizeof(infoBuf), vendorName); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, "ChoosePlatform fail!"); + + status = ChooseDevice(pCtx, &targetDevice, infoBuf, sizeof(infoBuf), deviceName); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, "ChooseDevice fail!"); + + // Create context + status = CreateContext(pCtx, targetDevice); + CHECK_AND_RETURN_RET_LOG(status == CL_SUCCESS, status, "CreateContext fail!"); + + return status; +} + +static void CleanDevice(ClContext *pCtx) +{ + if (pCtx->cmdQueueGPU) { + // Get cmd queue ref count to ensure no memory leak. + cl_uchar infoBuf[INFO_BUFFER_LENGTH] = {0}; + int status = + clGetCommandQueueInfo(pCtx->cmdQueueGPU, CL_QUEUE_REFERENCE_COUNT, sizeof(infoBuf), infoBuf, nullptr); + CHECK_AND_LOG(status == CL_SUCCESS, "[GPU] clGetCommandQueueInfo Failed!"); + cl_int *infoBufTmp = reinterpret_cast(infoBuf); + VPE_LOGI("[GPU]: cmd Queue ref count before release it : %{public}d\n", *infoBufTmp); + status = clReleaseCommandQueue(pCtx->cmdQueueGPU); + CHECK_AND_LOG(status == CL_SUCCESS, "[GPU] clReleaseCommandQueue Failed!"); + } + if (pCtx->context) { + cl_uchar infoBuf[INFO_BUFFER_LENGTH] = {0}; + // Get context ref count to ensure no memory leak. + int status = clGetContextInfo(pCtx->context, CL_CONTEXT_REFERENCE_COUNT, sizeof(infoBuf), infoBuf, nullptr); + CHECK_AND_LOG(status == CL_SUCCESS, "[GPU] clGetContextInfo Failed!"); + cl_int *infoBufTmp = reinterpret_cast(infoBuf); + VPE_LOGI("[GPU]: Context ref count before release it : %{public}d\n", *infoBufTmp); + status = clReleaseContext(pCtx->context); + CHECK_AND_LOG(status == CL_SUCCESS, "[GPU] clReleaseContext Failed!"); + } + if (pCtx->devices != nullptr) { + for (cl_uint i = 0; i < pCtx->numPlatforms; i++) { + if (pCtx->devices[i] != nullptr) { + free(pCtx->devices[i]); + pCtx->devices[i] = nullptr; + } + } + free(pCtx->devices); + pCtx->devices = nullptr; + } + if (pCtx->idxDevices != nullptr) { + free(pCtx->idxDevices); + pCtx->idxDevices = nullptr; + } + if (pCtx->numDevices != nullptr) { + free(pCtx->numDevices); + pCtx->numDevices = nullptr; + } + if (pCtx->platforms != nullptr) { + free(pCtx->platforms); + pCtx->platforms = nullptr; + } + if (pCtx != nullptr) { + free(pCtx); + pCtx = nullptr; + } +} + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +void CleanOpencl(ClContext *pCtx) +{ + if (pCtx == nullptr) { + VPE_LOGI("[GPU]: WARNING: ClneanOpencl input hadle is nullptr.\n"); + } else { + // Release infrastructure like CmdQ/context. Show ref count to see if there is memory leak. + // Free the whole clContext created by setupOpencl. 
+ CleanDevice(pCtx); + } + OHOS::Media::VideoProcessingEngine::ClDeInitOpenCL(); +} + +int SetupOpencl(void **pHandle, const char *vendorName, char *deviceName) +{ + ClContext *pCtx = reinterpret_cast(calloc(1, sizeof(ClContext))); + if (pCtx == nullptr) { + VPE_LOGE("[GPU]: SetupOpencl FAIL to allocate OpenCL Foundation context.\n"); + *pHandle = nullptr; + return CL_MEM_OBJECT_ALLOCATION_FAILURE; + } + + // Init CL infrastructure + cl_int status = InitOpenCL(pCtx, vendorName, deviceName); + if (status != CL_SUCCESS) { + CleanOpencl(pCtx); + *pHandle = nullptr; + return status; + } + + *(reinterpret_cast(pHandle)) = pCtx; + return static_cast(status); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/framework/algorithm/common/include/algorithm_utils.h b/framework/algorithm/common/include/algorithm_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..2b5730148cd09dd7511a34589f345778d08c4d3b --- /dev/null +++ b/framework/algorithm/common/include/algorithm_utils.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_INNER_API_ALGORITHM_ALGORITHM_UTILS_H +#define INTERFACES_INNER_API_ALGORITHM_ALGORITHM_UTILS_H + +#include + +#include "algorithm_errors.h" +#include "algorithm_video_common.h" +#include "surface_buffer.h" + +#define VPE_TO_STR(val) (std::string(#val) + "(" + std::to_string(static_cast(val)) + ")") + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class AlgorithmUtils { +public: + static std::string ToString(VPEAlgoErrCode errorCode); + static std::string ToString(VPEAlgoState state); + static bool CopySurfaceBufferToSurfaceBuffer(const sptr& srcBuffer, + sptr& destBuffer); +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_ALGORITHM_ALGORITHM_UTILS_H diff --git a/framework/algorithm/common/include/algorithm_video_impl.h b/framework/algorithm/common/include/algorithm_video_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..1800be6b6485d00d819c8c2d57c86e24c02596a9 --- /dev/null +++ b/framework/algorithm/common/include/algorithm_video_impl.h @@ -0,0 +1,169 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ALGORITHM_VIDEO_IMPL_H +#define ALGORITHM_VIDEO_IMPL_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "refbase.h" +#include "surface.h" + +#include "algorithm_errors.h" +#include "algorithm_video.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class VpeVideoImpl : public VpeVideo, public std::enable_shared_from_this { +public: + VPEAlgoErrCode RegisterCallback(const std::shared_ptr& callback) override; + VPEAlgoErrCode SetOutputSurface(const sptr& surface) override; + sptr GetInputSurface() override; + VPEAlgoErrCode Start() override; + VPEAlgoErrCode Stop() override; + VPEAlgoErrCode Flush() override; + VPEAlgoErrCode Enable() override; + VPEAlgoErrCode Disable() override; + VPEAlgoErrCode NotifyEos() override; + VPEAlgoErrCode ReleaseOutputBuffer(uint32_t index, bool render) override; + VPEAlgoErrCode RenderOutputBufferAtTime(uint32_t index, int64_t renderTimestamp) override; + +protected: + explicit VpeVideoImpl(uint32_t type) : type_(type) {} + virtual ~VpeVideoImpl(); + VpeVideoImpl(const VpeVideoImpl&) = delete; + VpeVideoImpl& operator=(const VpeVideoImpl&) = delete; + VpeVideoImpl(VpeVideoImpl&&) = delete; + VpeVideoImpl& operator=(VpeVideoImpl&&) = delete; + + bool IsInitialized() const; + VPEAlgoErrCode Initialize(); + VPEAlgoErrCode Deinitialize(); + void RefreshBuffers(); + void OnOutputFormatChanged(const Format& format); + + virtual VPEAlgoErrCode OnInitialize(); + virtual VPEAlgoErrCode OnDeinitialize(); + virtual VPEAlgoErrCode Process(const sptr& sourceImage, sptr& destinationImage); + virtual bool IsProducerSurfaceValid(const sptr& surface); + virtual VPEAlgoErrCode UpdateRequestCfg(const sptr& surface, BufferRequestConfig& requestCfg); + virtual void UpdateRequestCfg(const sptr& consumerBuffer, BufferRequestConfig& requestCfg); + +private: + enum class VPEState : int { + IDLE = 0, + RUNNING, + STOPPING + }; + + struct SurfaceBufferInfo { + sptr buffer{}; + VpeBufferFlag bufferFlag{VPE_BUFFER_FLAG_NONE}; + int64_t timestamp{}; + }; + + class ConsumerListener : public IBufferConsumerListener { + public: + explicit ConsumerListener(const std::shared_ptr& owner) : owner_(owner) {} + virtual ~ConsumerListener() = default; + ConsumerListener(const ConsumerListener&) = delete; + ConsumerListener& operator=(const ConsumerListener&) = delete; + ConsumerListener(ConsumerListener&&) = delete; + ConsumerListener& operator=(ConsumerListener&&) = delete; + + void OnBufferAvailable() final; + + private: + std::shared_ptr owner_; + }; + + void OnErrorLocked(VPEAlgoErrCode errorCode); + void OnStateLocked(VPEAlgoState state); + void OnEffectChange(uint32_t type); + void OnOutputBufferAvailable(uint32_t index, const VpeBufferInfo& info); + + GSError OnConsumerBufferAvailable(); + GSError OnProducerBufferReleased(); + + VPEAlgoErrCode RenderOutputBufferLocked(uint32_t index, int64_t renderTimestamp, bool render); + sptr CreateConsumerSurfaceLocked(); + bool RequestBuffer(GSError& errorCode); + void PrepareBuffers(); + void ProcessBuffers(); + bool ProcessBuffer(sptr& consumer, SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo); + void BypassBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo); + void OutputBuffer(const SurfaceBufferInfo& bufferInfo, const SurfaceBufferInfo& bufferImage, bool isProcessed, + std::function&& getReadyToRender); + bool PopBuffer(std::queue& bufferQueue, uint32_t index, SurfaceBufferInfo& bufferInfo, + 
std::function&)>&& func); + void SetRequestCfgLocked(const sptr& buffer); + void CheckSpuriousWakeup(); + bool CheckStopping(); + bool CheckStoppingLocked(); + void ClearQueue(std::queue& bufferQueue); + void ClearConsumerLocked(std::queue& bufferQueue); + void ClearBufferQueues(); + + VPEAlgoErrCode ExecuteWhenIdle(std::function&& operation, const std::string& errorMessage); + VPEAlgoErrCode ExecuteWhenNotIdle(std::function&& operation, const std::string& errorMessage); + VPEAlgoErrCode ExecuteWhenRunning(std::function&& operation, const std::string& errorMessage); + VPEAlgoErrCode ExecuteWithCheck(std::function&& checker, + std::function&& operation, const std::string& errorMessage); + + // Common + uint32_t type_{}; + + // For thread control + std::condition_variable cv_{}; + + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::atomic isRunning_{false}; + std::atomic isEnable_{true}; + std::atomic isEnableChange_{true}; + std::atomic state_{VPEState::IDLE}; + std::thread worker_{}; + std::shared_ptr cb_{}; + sptr consumer_{}; + sptr producer_{}; + BufferRequestConfig requestCfg_{}; + // Guarded by lock_ end + + mutable std::mutex bufferLock_{}; + // Guarded by bufferLock_ begin + bool isBufferQueueReady_{}; + std::queue consumerBufferQueue_{}; + std::queue producerBufferQueue_{}; + std::queue renderBufferQueue_{}; + std::queue attachBufferQueue_{}; + std::set attachBufferIDs_{}; + // Guarded by bufferLock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // ALGORITHM_VIDEO_IMPL_H diff --git a/framework/algorithm/common/include/frame_info.h b/framework/algorithm/common/include/frame_info.h new file mode 100644 index 0000000000000000000000000000000000000000..d736060e6e6bf1c7b9c3ff8c3331cbcbf106d078 --- /dev/null +++ b/framework/algorithm/common/include/frame_info.h @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef FRAMEWORK_ALGORITHM_COMMON_FRAME_INFO_H +#define FRAMEWORK_ALGORITHM_COMMON_FRAME_INFO_H + +#include <cstdint> +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum class BitDepth { + BIT_DEPTH_8, // 8-bit depth + BIT_DEPTH_10, // 10-bit depth + BIT_DEPTH_FP16 // 16-bit depth, fp16 format +}; + +struct FrameInfo { + uint32_t width = 0; // maximum input/output image width + uint32_t height = 0; // maximum input/output image height + uint32_t widthStride = 0; // input and output img width stride(padding at the end of the lines) + uint32_t heightStride = 0; // input and output img height stride + BitDepth bitDepth = BitDepth::BIT_DEPTH_8; // input image bit depth + GraphicPixelFormat pixelFormat = GRAPHIC_PIXEL_FMT_RGBA_8888; // pixel format + ColorSpaceDescription colorSpace = {GetColorSpaceInfo(CM_SRGB_FULL), CM_METADATA_NONE}; + + FrameInfo() = default; + explicit FrameInfo(const sptr<SurfaceBuffer> &buffer); +}; +struct ColorSpaceInfo { + GraphicPixelFormat pixelFormat = GRAPHIC_PIXEL_FMT_RGBA_8888; // pixel format + CM_ColorSpaceType colorSpace = CM_SRGB_FULL; // color space + CM_HDR_Metadata_Type metadataType = CM_METADATA_NONE; // color space metadata flag +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_COMMON_FRAME_INFO_H diff --git a/framework/algorithm/common/include/hdr_vivid_metadata_v1.h b/framework/algorithm/common/include/hdr_vivid_metadata_v1.h new file mode 100644 index 0000000000000000000000000000000000000000..3a8c16408810733114e2233d03bc7a646988556a --- /dev/null +++ b/framework/algorithm/common/include/hdr_vivid_metadata_v1.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef VPE_FRAMEWORK_ALGORITHM_COMMON_HDR_VIVID_METADATA_V1_H +#define VPE_FRAMEWORK_ALGORITHM_COMMON_HDR_VIVID_METADATA_V1_H + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +struct HdrVividMetadataV1 { + unsigned int systemStartCode; // System version number + // minimum_maxrgb_pq, minimum luminance of the displayed content, PQ domain, range 0~4095 + unsigned int minimumMaxRgbPq; + unsigned int averageMaxRgbPq; // average_maxrgb_pq, average luminance of the displayed content, PQ domain, range 0.0~4095 + // variance_maxrgb_pq, luminance variation range of the displayed content, PQ domain, range 0.0~4095 + unsigned int varianceMaxRgbPq; + unsigned int maximumMaxRgbPq; // maximum_maxrgb_pq, maximum luminance of the displayed content, PQ domain, range 0.0~4095 + // tone_mapping_enable_mode_flag, 0 or 1; 0 means no curve parameters are transmitted, otherwise they are + unsigned int toneMappingMode; + // tone_mapping_para_enable_num, number of tone-mapping parameter sets minus 1; + // 0 means there is one parameter set, 1 means there are two parameter sets + unsigned int toneMappingParamNum; + // targeted_system_display_maximum_luminancePq, maximum luminance of the reference target display, range 0.0~4095; + // the array length of 2 means there are 2 parameter sets, see tone_mapping_param_num + unsigned int targetedSystemDisplayMaximumLuminance[2]; + // base_enable_flag, base curve flag, 0 or 1; 0 means base curve parameters are not transmitted, 1 means they are + unsigned int baseFlag[4]; + unsigned int baseParamMp[2]; // Range 0~16383 + unsigned int baseParamMm[2]; // Range 0~63 + unsigned int baseParamMa[2]; // Range 0~1023 + unsigned int baseParamMb[2]; // Range 0~1023 + unsigned int baseParamMn[2]; // Range 0~63 + unsigned int baseParamK1[2]; // Two cases: less than or equal to 1, and greater than 1 + unsigned int baseParamK2[2]; // Two cases: less than or equal to 1, and greater than 1 + unsigned int baseParamK3[2]; // Three cases: less than or equal to 1, 1~2, and greater than 2 + // base_param_delta_enable_mode, adjustment coefficient mode of the current base curve mapping parameters + unsigned int baseParamDeltaMode[2]; + // base_param_enable_delta, adjustment coefficient value of the current base curve mapping parameters, range 0~127 + unsigned int baseParamDelta[2]; + // 3Spline_enable_flag, binary flag; 1 means cubic spline parameters are transmitted, 0 means they are not + unsigned int threeSplineFlag[2]; + unsigned int threeSplineNum[2]; // 3Spline_enable_num, number of cubic spline intervals, value 0 or 1 + // 3Spline_TH_enable_mode, cubic spline mode of the tone mapping, range 0~3; + // the array length of 4 here comes from P3Spline_num + unsigned int threeSplineThMode[2][4]; + // 3Spline_TH_enable_MB, slope and dark-area offset of the cubic spline interval parameters of the tone mapping + unsigned int threeSplineThMb[2][4]; + // 3Spline_TH_enable, cubic spline interval parameters of the tone mapping, range 0~4095; the third dimension holds + // the spline interval parameter (0~4095), spline interval 1 offset (0~1023) and spline interval 2 offset (0~1023) + unsigned int threeSplineTh[2][4][3]; + // 3Spline_enable_Strength, correction strength parameter of the cubic spline intervals of the tone mapping, range 0~255 + unsigned int threeSplineStrength[2][4]; + unsigned int colorSaturationMappingFlag; // Corresponds to color_saturation_mapping_enable_flag in the standard + unsigned int colorSaturationNum; // Corresponds to color_saturation_enable_num in the standard + unsigned int colorSaturationGain[16]; // Corresponds to color_saturation_enable_gain in the standard + std::vector gtmLut; // Stores the LUT generated by AIHDR +}; +// C-Link parameters +typedef struct HwDisplayMeta { + unsigned int type; // 0x03 + unsigned int size; // metadata size + unsigned int startX; + unsigned int startY; + unsigned int endX; + unsigned int endY; + unsigned int lutMap[17]; + unsigned int averageLuma; + unsigned int scene; // 0: landscape, 1: portrait + unsigned int expo; + unsigned int iso; + unsigned int flash; + unsigned int hdrHistgram[32]; + unsigned int noiseLevel; + unsigned int apetureValue; + unsigned int aelv; + unsigned int maxHeadroom; +} HwDisplayMeta; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VPE_FRAMEWORK_ALGORITHM_COMMON_HDR_VIVID_METADATA_V1_H diff --git a/framework/algorithm/common/include/image_opencl_wrapper.h b/framework/algorithm/common/include/image_opencl_wrapper.h new file mode 100644 index 0000000000000000000000000000000000000000..a44fb1c5cd5ecbc2f7008a63e6c5111929b9fb72 --- /dev/null +++ b/framework/algorithm/common/include/image_opencl_wrapper.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 
2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMAGE_OPENCL_WRAPPER_H_ +#define IMAGE_OPENCL_WRAPPER_H_ + +#include +#include +#define CL_TARGET_OPENCL_VERSION 300 +#include +#include + +#define MS_ASSERT(f) ((void)0) + +namespace OHOS { +// This is a opencl function wrapper. +namespace Media { + +using clImportMemoryFunc = cl_mem (*)(cl_context, cl_mem_flags, const cl_import_properties_arm *, void *, size_t, + cl_int *); + +extern clImportMemoryFunc clImportMemory; +} // namespace Media +} // namespace OHOS +struct ClImportMemoryParam { + cl_context context; + cl_mem_flags flags; + const cl_import_properties_arm* properties; + void* fd; + size_t size; + cl_int* errcode_ret; +}; +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +extern cl_mem ClImportMemory(ClImportMemoryParam clImportMemoryParam); +extern void ClDeInitOpenCL(); +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // IMAGE_OPENCL_WRAPPER_H_ diff --git a/framework/algorithm/common/include/image_openclsetup.h b/framework/algorithm/common/include/image_openclsetup.h new file mode 100644 index 0000000000000000000000000000000000000000..f639d706db3cdbba5ff8228e5180dd1f6e490fe0 --- /dev/null +++ b/framework/algorithm/common/include/image_openclsetup.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef IMAGE_OPENCL_SETUP_H +#define IMAGE_OPENCL_SETUP_H +#define CL_USE_DEPRECATED_OPENCL_1_2_APIS +#include +#include +#include +#include +#include + +struct ClContext { + // Platform and device + cl_platform_id *platforms{}; + cl_uint numPlatforms{}; // Total platforms + cl_uint idxPlatforms{}; // Current platform + cl_device_id **devices{}; // Devices[idxPlatforms] is link list header of platform[idxPlatforms] + cl_uint *numDevices{}; // each numDevices[i] record device number of each platform. 
+ cl_uint *idxDevices{}; // Current device under current platform + + // CL Context and Command queue + cl_context context{}; + cl_command_queue cmdQueueGPU{}; + cl_command_queue cmdQueueCPU{}; + cl_command_queue cmdQueueDSP{}; + cl_command_queue cmdQueueOther{}; +}; +#define INFO_BUFFER_LENGTH 128 + +extern "C" { +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +int SetupOpencl(void **pHandle, const char *vendorName, char *deviceName); +void CleanOpencl(ClContext *pCtx); +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +} + +#endif // IMAGE_OPENCL_SETUP_H diff --git a/framework/algorithm/common/include/vpe_context.h b/framework/algorithm/common/include/vpe_context.h new file mode 100644 index 0000000000000000000000000000000000000000..8fdd8b79714617a681c57579113f90e21647d761 --- /dev/null +++ b/framework/algorithm/common/include/vpe_context.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_COMMON_CREAT_CONTEXT_H +#define FRAMEWORK_ALGORITHM_COMMON_CREAT_CONTEXT_H + +#include +#include "algorithm_common.h" +#include "image_openclsetup.h" +#include "EGL/egl.h" + +typedef void *EGLDisplay; +struct OpenGLContext { + EGLDisplay display {EGL_NO_DISPLAY}; + EGLContext context {EGL_NO_CONTEXT}; +}; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +typedef void *EGLDisplay; + +struct VPEContext { + ClContext *clContext {nullptr}; + EGLDisplay glDisplay {EGL_NO_DISPLAY}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_COMMON_FRAME_INFO_H diff --git a/framework/algorithm/common/include/vpe_parse_metadata.h b/framework/algorithm/common/include/vpe_parse_metadata.h new file mode 100644 index 0000000000000000000000000000000000000000..e1694f41d8a74f0d066237b3534799ad5f721c49 --- /dev/null +++ b/framework/algorithm/common/include/vpe_parse_metadata.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef VPE_FRAMEWORK_ALGORITHM_COMMON_PARSE_METADATA_H
+#define VPE_FRAMEWORK_ALGORITHM_COMMON_PARSE_METADATA_H
+
+#include <vector>
+#include "hdr_vivid_metadata_v1.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+constexpr int METADATA_MAX_LENGTH = 512;
+constexpr int AIHDR_METADATA_MAX_LENGTH = 128;
+// The following values fix the number of bits occupied by each metadata field, per section 7.4 of the CUVA standard
+using CuvaMetadataBitNum = enum {
+    SYSTEM_START_CODE_BIT = 8,
+    MINIMUM_MAXRGB_BIT = 12,
+    AVERAGE_MAXRGB_BIT = 12,
+    VARIANCE_MAXRGB_BIT = 12,
+    MAXIMUM_MAXRGB_BIT = 12,
+    TONE_MAPPING_MODE_BIT = 1,
+    TONE_MAPPING_PARAM_NUM_BIT = 1,
+    TARGETED_SYSTEM_DISPLAY_BIT = 12,
+    BASE_FLAG_BIT = 1,
+    BASE_PARAM_MP_BIT = 14,
+    BASE_PARAM_MM_BIT = 6,
+    BASE_PARAM_MA_BIT = 10,
+    BASE_PARAM_MB_BIT = 10,
+    BASE_PARAM_MN_BIT = 6,
+    BASE_PARAM_K1_BIT = 2,
+    BASE_PARAM_K2_BIT = 2,
+    BASE_PARAM_K3_BIT = 4,
+    BASE_PARAM_DELTA_MODE_BIT = 3,
+    BASE_PARAM_DELTA_BIT = 7,
+    P3SPLINE_FLAG_BIT = 1,
+    P3SPLINE_NUM_BIT = 1,
+    P3SPLINE_TH_MODE_BIT = 2,
+    P3SPLINE_TH_MB_BIT = 8,
+    P3SPLINE_TH_OFFSET_BIT = 2,
+    P3SPLINE_TH1_BIT = 12,
+    P3SPLINE_TH2_BIT = 10,
+    P3SPLINE_TH3_BIT = 10,
+    P3SPLINE_STRENGTH_BIT = 8,
+    COLOR_SATURATION_BIT = 1,
+    COLOR_SATURATION_NUM_BIT = 3,
+    COLOR_SATURATION_GAIN_BIT = 8,
+    CUVA_TWELVE_BIT_NUM = 4095,
+    CUVA_LOW_AREA_CURVE = 0,
+    CUVA_SPLINE_AREA_CURVE = 1,
+    CUVA_HIGH_AREA_CURVE = 2,
+    CUVA_TYPE_BIT = 4,
+    MARKERBIT_BIT = 1,
+    DISPLAY_3BIT = 3,
+    DISPLAY_8BIT = 8,
+    DISPLAY_MARKERBIT = 1,
+    DISPLAY_16BIT = 16
+};
+
+int PraseMetadataFromArray(const std::vector<uint8_t> &cuvaInfo, HdrVividMetadataV1 &tmoCuvaMetadata,
+    HwDisplayMeta &displayMeta);
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
+
+#endif // VPE_FRAMEWORK_ALGORITHM_COMMON_PARSE_METADATA_H
diff --git a/framework/algorithm/common/include/vpe_utils_common.h b/framework/algorithm/common/include/vpe_utils_common.h
new file mode 100644
index 0000000000000000000000000000000000000000..bb5815661d4072e34ff30cfeaafbf9cfb0a29ab2
--- /dev/null
+++ b/framework/algorithm/common/include/vpe_utils_common.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMAGE_PROCESSING_UTILS_IMPL_H
+#define IMAGE_PROCESSING_UTILS_IMPL_H
+
+#include "pixelmap_native_impl.h"
+#include "surface_buffer.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+class VpeUtils {
+public:
+    static sptr<SurfaceBuffer> GetSurfaceBufferFromPixelMap(const std::shared_ptr<PixelMap>& pixelmap);
+    static bool ConvertPixelmapToSurfaceBuffer(
+        const std::shared_ptr<PixelMap>& pixelmap, sptr<SurfaceBuffer> bufferImpl);
+    static bool ConvertSurfaceBufferToPixelmap(const sptr<SurfaceBuffer>& buffer,
+        std::shared_ptr<PixelMap>& pixelmap);
+    static bool SetSurfaceBufferToPixelMap(const sptr<SurfaceBuffer>& buffer,
+        std::shared_ptr<PixelMap>& pixelmap);
+    static bool CopyRGBASurfaceBufferToPixelmap(const sptr<SurfaceBuffer>& buffer,
+        std::shared_ptr<PixelMap>& pixelmap);
+    static bool CopyRGBAPixelmapToSurfaceBuffer(const std::shared_ptr<PixelMap>& pixelmap,
+        sptr<SurfaceBuffer>& buffer);
+    static bool CopyNV12SurfaceBufferToPixelmap(const sptr<SurfaceBuffer>& buffer,
+        std::shared_ptr<PixelMap>& pixelmap);
+    static bool CopyNV12PixelmapToSurfaceBuffer(const std::shared_ptr<PixelMap>& pixelmap,
+        sptr<SurfaceBuffer>& buffer);
+};
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
+
+#endif // IMAGE_PROCESSING_UTILS_IMPL_H
\ No newline at end of file
diff --git a/framework/algorithm/common/vpe_context.cpp b/framework/algorithm/common/vpe_context.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..39cb011038558f2f94a29650c6aa1575be31255e
--- /dev/null
+++ b/framework/algorithm/common/vpe_context.cpp
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "vpe_context.h"
+#include "vpe_log.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+CreatContext::VPEContext() {}
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
diff --git a/framework/algorithm/common/vpe_parse_metadata.cpp b/framework/algorithm/common/vpe_parse_metadata.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..39f6da2e73c2b4b8cbf6d12987f7fe5076d85e7e
--- /dev/null
+++ b/framework/algorithm/common/vpe_parse_metadata.cpp
@@ -0,0 +1,312 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
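// Illustrative sketch, not part of the original patch: one way the parser entry point
// declared in vpe_parse_metadata.h might be driven from a raw HDR Vivid (CUVA) metadata
// blob. It assumes the metadata arrives as a byte vector; the helper name is hypothetical.
#include <cstdint>
#include <vector>
#include "vpe_parse_metadata.h"

namespace {
void ExampleParseCuvaBlob(const std::vector<uint8_t>& blob)
{
    using namespace OHOS::Media::VideoProcessingEngine;
    HdrVividMetadataV1 metadata {};
    HwDisplayMeta displayMeta {};
    // Returns 0 on success; non-zero when a range check or the internal copy fails.
    if (PraseMetadataFromArray(blob, metadata, displayMeta) == 0) {
        // metadata.maximumMaxRgbPq now holds the peak content luminance in the PQ domain (0~4095).
    }
}
} // namespace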
+
+#include "vpe_parse_metadata.h"
+#include <cstdint>
+#include <securec.h>
+#include "vpe_log.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+
+// Byte swap: reverse the order of the 4 bytes
+static inline uint32_t ConvertEndian(uint32_t x)
+{
+    // 8 & 24: bit offset; 0x0000ff00 & 0x00ff0000 & 0x000000ff: bit mask
+    return ((x << 24) | ((x & 0x0000ff00) << 8) | ((x & 0x00ff0000) >> 8) | ((x >> 24) & 0x000000ff));
+}
+
+struct BsInfo {
+    uint8_t *head;
+    uint8_t *tail;
+    uint32_t bufA;
+    uint32_t bufB;
+    int32_t bsLen;
+    int32_t bufPos;
+    uint32_t totalPos;
+};
+
+static void InitBs(BsInfo *bs, const uint8_t *input, int32_t length)
+{
+    uint32_t data;
+    uint64_t alignWord = 0x3; // align the start address to a 4-byte boundary
+
+    bs->head = reinterpret_cast<uint8_t *>(reinterpret_cast<uint64_t>(input) & ~alignWord);
+    bs->tail = bs->head + 8; // 8: initialize the tail address to 8 bytes past the head address
+    bs->bsLen = length;
+
+    data = *reinterpret_cast<uint32_t *>(bs->head);
+    bs->bufA = ConvertEndian(data);
+    // 4: read the next 4 bytes into bufB; used when the requested metadata field spans two 4-byte words
+    data = *reinterpret_cast<uint32_t *>(bs->head + 4);
+    bs->bufB = ConvertEndian(data);
+    // 3 is bit offset: record the offset (in bits) between the aligned address and the actual address
+    bs->bufPos = (reinterpret_cast<uint64_t>(input) & 0x3) << 3;
+    bs->totalPos = 0;
+
+    return;
+}
+
+static int32_t ShowBs(const BsInfo *bs, int32_t currentBitNum)
+{
+    uint32_t pos = static_cast<uint32_t>(bs->bufPos);
+    // 0x20: 4 bytes; return 0 directly if the offset between the aligned and actual address exceeds 4 bytes,
+    // if the requested bit count is 0, or if it is larger than 4 bytes
+    if (pos >= 0x20 || currentBitNum == 0 || currentBitNum > 0x20) {
+        return 0;
+    }
+    // Return 0 directly when the requested bits exceed the remaining bits of the stream
+    // 8: one byte is 8 bits; the current read position must stay below the total bit count of the input array
+    if (static_cast<uint32_t>(bs->totalPos + currentBitNum) > static_cast<uint32_t>(bs->bsLen * 8)) {
+        return 0;
+    }
+    // 0x20 (32): if the requested field spans two 4-byte words, read twice and splice the two values together.
+    uint32_t data = 0;
+    int32_t abPos = currentBitNum + bs->bufPos;
+    if (abPos > 0x20) {
+        data = bs->bufA << pos;
+        // 0x20 - pos is the bit offset between the high word and the low word; after the shift, data1 lines up with data
+        uint32_t data1 = (pos == 0) ? 0 : (bs->bufB >> (0x20 - pos));
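+        // Worked example: with bufPos = 28 and an 8-bit field, abPos = 36 > 0x20, so the field spans
+        // both words: its upper 4 bits come from the end of bufA and its lower 4 bits from the start
+        // of bufB; the final right shift below leaves the 8-bit value right-aligned in data.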
+        data |= data1;
+        data >>= static_cast<uint32_t>(0x20 - currentBitNum); // 0x20: 4 bytes
+    } else { // if the field does not span two 4-byte words, read the shifted bufA value directly. 0x20: 4 bytes
+        data = (bs->bufA << pos) >> static_cast<uint32_t>(0x20 - currentBitNum);
+    }
+    return static_cast<int32_t>(data);
+}
+
+static int32_t SkipBs(BsInfo *bs, int32_t currentBitNum)
+{
+    bs->totalPos += static_cast<uint32_t>(currentBitNum);
+    // 8: one byte is 8 bits; the current read position must stay below the total bit count of the input array
+    if (bs->totalPos < static_cast<uint32_t>(bs->bsLen * 8)) {
+        int32_t abPos = currentBitNum + bs->bufPos;
+        if (abPos >= 32) { // 32: if (address offset + read bits) reaches 4 bytes
+            bs->bufPos = abPos - 32; // 32: update the offset to (address offset + read bits) % 32
+            bs->bufA = bs->bufB; // the first 4-byte buffer takes over the value of the second 4-byte buffer
+            uint32_t data = *reinterpret_cast<uint32_t *>(bs->tail);
+            bs->bufB = ConvertEndian(data);
+            bs->tail += 4; // 4: move the read window forward by 4 bytes
+        } else {
+            bs->bufPos += currentBitNum;
+        }
+    }
+    return currentBitNum;
+}
+
+static __inline int32_t GetBs(BsInfo *bs, int32_t currentBitNum)
+{
+    uint32_t data = static_cast<uint32_t>(ShowBs(bs, currentBitNum));
+    SkipBs(bs, currentBitNum);
+    return static_cast<int32_t>(data);
+}
+
+static uint32_t HevcUv(BsInfo *bsPtr, int32_t bitNum)
+{
+    uint32_t code = static_cast<uint32_t>(GetBs(bsPtr, bitNum));
+    return code;
+}
+
+void SetThreeSpline(BsInfo *bsPtr, HdrVividMetadataV1 &tmoCuvaMetadata, int index)
+{
+    for (unsigned int mode_i = 0; mode_i < tmoCuvaMetadata.threeSplineNum[index]; mode_i++) {
+        tmoCuvaMetadata.threeSplineThMode[index][mode_i] = HevcUv(bsPtr, P3SPLINE_TH_MODE_BIT);
+        // per section 7.3 of the standard
+        if ((tmoCuvaMetadata.threeSplineThMode[index][mode_i] == 0) ||
+            (tmoCuvaMetadata.threeSplineThMode[index][mode_i] == 2)) { // 2:
+            tmoCuvaMetadata.threeSplineThMb[index][mode_i] = HevcUv(bsPtr, P3SPLINE_TH_MB_BIT);
+        }
+        tmoCuvaMetadata.threeSplineTh[index][mode_i][0] = HevcUv(bsPtr, P3SPLINE_TH1_BIT);
+        tmoCuvaMetadata.threeSplineTh[index][mode_i][1] = HevcUv(bsPtr, P3SPLINE_TH2_BIT);
+        tmoCuvaMetadata.threeSplineTh[index][mode_i][2] = HevcUv(bsPtr, P3SPLINE_TH3_BIT); // 2:
+        tmoCuvaMetadata.threeSplineStrength[index][mode_i] = HevcUv(bsPtr, P3SPLINE_STRENGTH_BIT);
+    }
+}
+
+int SetToneMappingMode(BsInfo *bsPtr, HdrVividMetadataV1 &tmoCuvaMetadata)
+{
+    if (!tmoCuvaMetadata.toneMappingMode) {
+        return 0;
+    }
+
+    tmoCuvaMetadata.toneMappingParamNum = HevcUv(bsPtr, TONE_MAPPING_PARAM_NUM_BIT);
+    // test
+    tmoCuvaMetadata.toneMappingParamNum++;
+    // For Memory Safety
+    // tone_mapping_param_num occupies 1 bit, so its raw value is at most 1; after the increment it is at most 2
+    if (tmoCuvaMetadata.toneMappingParamNum > 2) {
+        VPE_LOGE("ToneMappingParamNum Out Of Range, value: %i", tmoCuvaMetadata.toneMappingParamNum);
+        return 1;
+    }
+    for (unsigned int j = 0; j < tmoCuvaMetadata.toneMappingParamNum; j++) {
+        tmoCuvaMetadata.targetedSystemDisplayMaximumLuminance[j] = HevcUv(bsPtr, TARGETED_SYSTEM_DISPLAY_BIT);
+        tmoCuvaMetadata.baseFlag[j] = HevcUv(bsPtr, BASE_FLAG_BIT);
+        if (tmoCuvaMetadata.baseFlag[j]) {
+            tmoCuvaMetadata.baseParamMp[j] = HevcUv(bsPtr, BASE_PARAM_MP_BIT);
+            tmoCuvaMetadata.baseParamMm[j] = HevcUv(bsPtr, BASE_PARAM_MM_BIT);
+            tmoCuvaMetadata.baseParamMa[j] = HevcUv(bsPtr, BASE_PARAM_MA_BIT);
+            tmoCuvaMetadata.baseParamMb[j] = HevcUv(bsPtr, BASE_PARAM_MB_BIT);
+            tmoCuvaMetadata.baseParamMn[j] = HevcUv(bsPtr, BASE_PARAM_MN_BIT);
+            tmoCuvaMetadata.baseParamK1[j] = HevcUv(bsPtr, BASE_PARAM_K1_BIT);
+            tmoCuvaMetadata.baseParamK2[j] = HevcUv(bsPtr, BASE_PARAM_K2_BIT);
+            tmoCuvaMetadata.baseParamK3[j] = HevcUv(bsPtr, BASE_PARAM_K3_BIT);
+            tmoCuvaMetadata.baseParamDeltaMode[j] = HevcUv(bsPtr, BASE_PARAM_DELTA_MODE_BIT);
+            tmoCuvaMetadata.baseParamDelta[j] = HevcUv(bsPtr, BASE_PARAM_DELTA_BIT);
+        }
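+        // 3Spline_enable_flag: when set, a three-spline section (interval count, mode, thresholds
+        // and strength) follows for this tone-mapping parameter set.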
tmoCuvaMetadata.threeSplineFlag[j] = HevcUv(bsPtr, P3SPLINE_FLAG_BIT); + if (tmoCuvaMetadata.threeSplineFlag[j]) { + tmoCuvaMetadata.threeSplineNum[j] = HevcUv(bsPtr, P3SPLINE_NUM_BIT); + tmoCuvaMetadata.threeSplineNum[j]++; + // For Memory Safety + // p3SplineNum占用1bitˇ最大为1ˇ加1后最大为2 + if (tmoCuvaMetadata.threeSplineNum[j] > 2) { + VPE_LOGE("ThreeSplineNum Out Of Range, value: %i", tmoCuvaMetadata.threeSplineNum[j]); + return 1; + } + SetThreeSpline(bsPtr, tmoCuvaMetadata, j); + } + } + return 0; +} + +int SetcolorSaturationMapping(BsInfo *bsPtr, HdrVividMetadataV1 &tmoCuvaMetadata) +{ + if (tmoCuvaMetadata.colorSaturationMappingFlag) { + tmoCuvaMetadata.colorSaturationNum = HevcUv(bsPtr, COLOR_SATURATION_NUM_BIT); + // For Memory Safety + if (tmoCuvaMetadata.colorSaturationNum > 8) { // 8: + VPE_LOGE("ColorSaturationNum Out Of Range, value: %i", tmoCuvaMetadata.colorSaturationNum); + return 1; + } + for (unsigned int mode_i = 0; mode_i < tmoCuvaMetadata.colorSaturationNum; mode_i++) { + tmoCuvaMetadata.colorSaturationGain[mode_i] = HevcUv(bsPtr, COLOR_SATURATION_GAIN_BIT); + } + } + return 0; +} +void PraseDisplayMetadataFromArray(BsInfo *bsPtr, HwDisplayMeta &displayMeta) +{ + // HwDisplayMeta displayMeta; + displayMeta.type = HevcUv(bsPtr, DISPLAY_3BIT); + VPE_LOGD("displayMeta type = %{public}d", displayMeta.type); + if (displayMeta.type > 0) { + displayMeta.size = HevcUv(bsPtr, DISPLAY_16BIT); + auto markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.startX = HevcUv(bsPtr, DISPLAY_16BIT); + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.startY = HevcUv(bsPtr, DISPLAY_16BIT); + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.endX = HevcUv(bsPtr, DISPLAY_16BIT); + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.endY = HevcUv(bsPtr, DISPLAY_16BIT); + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + for (int i = 0; i < 17; i++) { // 17 + displayMeta.lutMap[i] = HevcUv(bsPtr, DISPLAY_8BIT); + if (i % 2 == 1) { // 2 + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + } + } + displayMeta.averageLuma = HevcUv(bsPtr, DISPLAY_16BIT); + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.scene = HevcUv(bsPtr, DISPLAY_8BIT); + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.expo = HevcUv(bsPtr, DISPLAY_16BIT); + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.iso = HevcUv(bsPtr, DISPLAY_16BIT); + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.flash = HevcUv(bsPtr, DISPLAY_8BIT); + for (int i = 0; i < 32; i++) { // 32 + displayMeta.hdrHistgram[i] = HevcUv(bsPtr, DISPLAY_8BIT); + if (i % 2 == 0) { // 2 + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + } + } + displayMeta.noiseLevel = HevcUv(bsPtr, DISPLAY_8BIT); + markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.apetureValue = HevcUv(bsPtr, DISPLAY_8BIT); + displayMeta.aelv = HevcUv(bsPtr, DISPLAY_8BIT); + displayMeta.maxHeadroom = 5115; // 5115 = 5 * 1023; + VPE_LOGD("CLink displayMeta.markerbit = %{public}d\n", markerbit); + } + if (displayMeta.type > 1) { + auto markerbit = HevcUv(bsPtr, DISPLAY_MARKERBIT); + displayMeta.maxHeadroom = HevcUv(bsPtr, DISPLAY_16BIT); + VPE_LOGD("CLink displayMeta.markerbit = %{public}d\n", markerbit); + VPE_LOGD("CLink displayMeta.maxHeadroom = %{public}d\n", displayMeta.maxHeadroom); + } + return; +} +uint32_t PraseMatadataType(BsInfo *bsPtr) +{ + HevcUv(bsPtr, MARKERBIT_BIT); + uint32_t type = HevcUv(bsPtr, CUVA_TYPE_BIT); + VPE_LOGD("HDR: extension metadata type = %{public}d", type); + return type; +} + +int 
PraseMetadataFromArray(const std::vector &cuvaInfo, HdrVividMetadataV1 &tmoCuvaMetadata, + HwDisplayMeta &displayMeta) +{ + BsInfo bs {}; + BsInfo *bsPtr = &bs; + int cuvaInfoSize = static_cast(cuvaInfo.size()); + if (cuvaInfoSize == AIHDR_METADATA_MAX_LENGTH) { + tmoCuvaMetadata.gtmLut = cuvaInfo; + } + VPE_LOGD("cuvaInfoSize = %{public}d\n", cuvaInfoSize); + std::vector cuvaInfoCopy; + if (cuvaInfoSize > METADATA_MAX_LENGTH) { // 最大metadata长度512 + cuvaInfoSize = METADATA_MAX_LENGTH; + } + cuvaInfoCopy.resize(METADATA_MAX_LENGTH); // 最大metadata长度512 + errno_t res = memcpy_s(cuvaInfoCopy.data(), cuvaInfoCopy.size(), cuvaInfo.data(), cuvaInfo.size()); + CHECK_AND_RETURN_RET_LOG(res == EOK, res, "memcpy_s failed"); + InitBs(bsPtr, &cuvaInfoCopy[0], cuvaInfoSize); + tmoCuvaMetadata.systemStartCode = HevcUv(bsPtr, SYSTEM_START_CODE_BIT); + int numWindows = 0; + if (tmoCuvaMetadata.systemStartCode == 1) { + numWindows = 1; + } + + for (int i = 0; i < numWindows; i++) { + tmoCuvaMetadata.minimumMaxRgbPq = HevcUv(bsPtr, MINIMUM_MAXRGB_BIT); + tmoCuvaMetadata.averageMaxRgbPq = HevcUv(bsPtr, AVERAGE_MAXRGB_BIT); + tmoCuvaMetadata.varianceMaxRgbPq = HevcUv(bsPtr, VARIANCE_MAXRGB_BIT); + tmoCuvaMetadata.maximumMaxRgbPq = HevcUv(bsPtr, MAXIMUM_MAXRGB_BIT); + tmoCuvaMetadata.toneMappingMode = HevcUv(bsPtr, TONE_MAPPING_MODE_BIT); + int ret = SetToneMappingMode(bsPtr, tmoCuvaMetadata); + if (ret != 0) { return ret; } + tmoCuvaMetadata.colorSaturationMappingFlag = HevcUv(bsPtr, COLOR_SATURATION_BIT); + ret = SetcolorSaturationMapping(bsPtr, tmoCuvaMetadata); + if (ret != 0) { return ret; } + } + // 8 :一个字节占8位ˇ当前读取位小于传入数组总位数才合理 + while (bsPtr->totalPos + 8 < static_cast(bsPtr->bsLen * 8)) { // 存在后续元数据内容 + uint32_t metadataType = PraseMatadataType(bsPtr); + if ((metadataType & 0x3) == 1) { + // ToDo: Prase LocalToneMapping Metadata + VPE_LOGD("LTM Metadata Prase!"); + } else if ((metadataType & 0x3) == 3) { // 3 拍显元数据标志位 + VPE_LOGD("CLink Metadata Prase!"); + PraseDisplayMetadataFromArray(bsPtr, displayMeta); + } else { + VPE_LOGD("Unknown Extension Metadata type!"); + break; + } + } + return 0; +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/common/vpe_utils_common.cpp b/framework/algorithm/common/vpe_utils_common.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e309fb721cf9dedd1046a61990b188c85077fe5d --- /dev/null +++ b/framework/algorithm/common/vpe_utils_common.cpp @@ -0,0 +1,265 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "vpe_utils_common.h" + +#include + +#include "algorithm_utils.h" +#include "surface_buffer_impl.h" +#include "surface_type.h" +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +namespace { +const std::map FORMAT_MAP = { + { OHOS::Media::PixelFormat::RGBA_8888, GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { OHOS::Media::PixelFormat::BGRA_8888, GraphicPixelFormat::GRAPHIC_PIXEL_FMT_BGRA_8888 }, + { OHOS::Media::PixelFormat::NV21, GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { OHOS::Media::PixelFormat::NV12, GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, +}; +float GetPixelFormatCoefficient(OHOS::Media::PixelFormat format) +{ + float coefficient; + switch (format) { + case OHOS::Media::PixelFormat::RGBA_8888: + case OHOS::Media::PixelFormat::BGRA_8888: + coefficient = 4.0; // 4.0 size coefficient + break; + case OHOS::Media::PixelFormat::NV12: + case OHOS::Media::PixelFormat::NV21: + coefficient = 1.0; // 1.0 size coefficient + break; + default: + coefficient = 3.0; // 3.0 size coefficient + break; + } + return coefficient; +} +} + +sptr VpeUtils::GetSurfaceBufferFromPixelMap( + const std::shared_ptr& pixelmap) +{ + if (pixelmap->GetAllocatorType() == AllocatorType::DMA_ALLOC) { + return reinterpret_cast(pixelmap->GetFd()); + } + auto buffer = SurfaceBuffer::Create(); + CHECK_AND_RETURN_RET_LOG(buffer != nullptr, nullptr, "get surface buffer failed!"); + CHECK_AND_RETURN_RET_LOG(ConvertPixelmapToSurfaceBuffer(pixelmap, buffer), nullptr, + "get surface buffer failed!"); + return buffer; +} + +bool VpeUtils::ConvertPixelmapToSurfaceBuffer( + const std::shared_ptr& pixelmap, sptr bufferImpl) +{ + auto it = FORMAT_MAP.find(pixelmap->GetPixelFormat()); + if (it == FORMAT_MAP.end()) [[unlikely]] { + VPE_LOGE("unsupported format: %{public}d", pixelmap->GetPixelFormat()); + return false; + } + BufferRequestConfig bfConfig = {}; + bfConfig.width = pixelmap->GetWidth(); + bfConfig.height = pixelmap->GetHeight(); + bfConfig.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA | BUFFER_USAGE_MEM_MMZ_CACHE; + bfConfig.strideAlignment = bfConfig.width; + bfConfig.format = it->second; + bfConfig.timeout = 0; + bfConfig.colorGamut = GraphicColorGamut::GRAPHIC_COLOR_GAMUT_SRGB; + bfConfig.transform = GraphicTransformType::GRAPHIC_ROTATE_NONE; + CHECK_AND_RETURN_RET_LOG((bufferImpl->Alloc(bfConfig) == GSERROR_OK), false, "invalid OH_PixelmapNative image"); + bool ret; + switch (pixelmap->GetPixelFormat()) { + case OHOS::Media::PixelFormat::RGBA_8888: + case OHOS::Media::PixelFormat::BGRA_8888: + ret = CopyRGBAPixelmapToSurfaceBuffer(pixelmap, bufferImpl); + break; + case OHOS::Media::PixelFormat::NV12: + case OHOS::Media::PixelFormat::NV21: + ret = CopyNV12PixelmapToSurfaceBuffer(pixelmap, bufferImpl); + break; + default: + ret = false; + break; + } + return ret; +} + +bool VpeUtils::ConvertSurfaceBufferToPixelmap(const sptr& buffer, + std::shared_ptr& pixelmap) +{ + bool ret; + switch (pixelmap->GetPixelFormat()) { + case OHOS::Media::PixelFormat::RGBA_8888: + case OHOS::Media::PixelFormat::BGRA_8888: + ret = CopyRGBASurfaceBufferToPixelmap(buffer, pixelmap); + break; + case OHOS::Media::PixelFormat::NV12: + case OHOS::Media::PixelFormat::NV21: + ret = CopyNV12SurfaceBufferToPixelmap(buffer, pixelmap); + break; + default: + ret = false; + break; + } + return ret; +} + +bool VpeUtils::CopyRGBASurfaceBufferToPixelmap(const sptr& buffer, + std::shared_ptr& pixelmap) +{ + int32_t pixelmapStride 
= pixelmap->GetRowStride() > 1.5 * buffer->GetStride() ? // 1.5 + pixelmap->GetRowStride() / 2 : pixelmap->GetRowStride(); // 2 + int rowSize = std::min(buffer->GetStride(), pixelmapStride); + for (int i = 0; i < buffer->GetHeight(); ++i) { + if (memcpy_s(static_cast(pixelmap->GetWritablePixels()) + i * pixelmapStride, rowSize, + static_cast(buffer->GetVirAddr()) + i * buffer->GetStride(), rowSize) != EOK) { + VPE_LOGE("Failed to copy image buffer!"); + return false; + } + } + return true; +} + +bool VpeUtils::CopyRGBAPixelmapToSurfaceBuffer(const std::shared_ptr& pixelmap, + sptr& buffer) +{ + int32_t rowBufferSize = pixelmap->GetWidth() * GetPixelFormatCoefficient(pixelmap->GetPixelFormat()); + int32_t pixelmapStride = pixelmap->GetRowStride() > 1.5 * buffer->GetStride() ? // 1.5 + pixelmap->GetRowStride() / 2 : pixelmap->GetRowStride(); // 2 + for (int i = 0; i < pixelmap->GetHeight(); ++i) { + if (memcpy_s(static_cast(buffer->GetVirAddr()) + i * buffer->GetStride(), + rowBufferSize, pixelmap->GetPixels() + i * pixelmapStride, rowBufferSize) != EOK) { + VPE_LOGE("copy data failed!"); + return false; + } + } + return true; +} + +bool VpeUtils::CopyNV12SurfaceBufferToPixelmap(const sptr& buffer, + std::shared_ptr& pixelmap) +{ + YUVDataInfo yuvInfo; + pixelmap->GetImageYUVInfo(yuvInfo); + int32_t pixelmapStride = pixelmap->GetRowStride() > 1.5 * buffer->GetStride() ? // 1.5 + pixelmap->GetRowStride() / 2 : pixelmap->GetRowStride(); // 2 + int32_t pixelmapUvStride = static_cast((yuvInfo.uvStride > 0) ? + static_cast(yuvInfo.uvStride) : pixelmapStride); + int32_t pixelmapUvOffset = static_cast((yuvInfo.uvOffset > 0) ? + static_cast(yuvInfo.uvOffset) : pixelmapStride * pixelmap->GetHeight()); + int32_t pixelmapUvHeight = static_cast((yuvInfo.uvHeight > 0) ? + static_cast(yuvInfo.uvHeight) : (pixelmap->GetHeight() + 1) / 2); // 2 + + int32_t bufferUvStride = buffer->GetStride(); + int32_t bufferUvOffset = buffer->GetStride() * buffer->GetHeight(); + int32_t bufferUvHeight = (buffer->GetHeight() + 1) / 2; + void *planesInfoPtr = nullptr; + buffer->GetPlanesInfo(&planesInfoPtr); + auto planesInfo = static_cast(planesInfoPtr); + + if (planesInfo != nullptr) { + int idx = (pixelmap->GetPixelFormat() == OHOS::Media::PixelFormat::NV12) ? 
1 : 2; // 1 NV12, 2 NV21 + bufferUvStride = static_cast(planesInfo->planes[idx].columnStride); + bufferUvOffset = static_cast(planesInfo->planes[idx].offset); + if (bufferUvStride != 0) { + bufferUvHeight = static_cast(planesInfo->planes[idx].offset) / bufferUvStride; + } + } + + int32_t yStride = std::min(pixelmapStride, buffer->GetStride()); + for (int i = 0; i < pixelmap->GetHeight(); ++i) { + if (memcpy_s(static_cast(pixelmap->GetWritablePixels()) + i * pixelmapStride, yStride, + static_cast(buffer->GetVirAddr()) + i * buffer->GetStride(), yStride) != EOK) { + VPE_LOGE("Failed to copy NV12 buffer to pixelmap!"); + return false; + } + } + + int32_t uvStride = std::min(pixelmapUvStride, bufferUvStride); + int32_t uvHeight = std::min(pixelmapUvHeight, bufferUvHeight); + for (int i = 0; i < uvHeight; ++i) { + if (memcpy_s(static_cast(pixelmap->GetWritablePixels()) + pixelmapUvOffset + i * pixelmapUvStride, + uvStride, static_cast(buffer->GetVirAddr()) + bufferUvOffset + i * bufferUvStride, + uvStride) != EOK) { + VPE_LOGE("Failed to copy NV12 buffer to pixelmap!"); + return false; + } + } + return true; +} + +bool VpeUtils::CopyNV12PixelmapToSurfaceBuffer(const std::shared_ptr& pixelmap, + sptr& buffer) +{ + YUVDataInfo yuvInfo; + pixelmap->GetImageYUVInfo(yuvInfo); + int32_t pixelmapStride = pixelmap->GetRowStride() > 1.5 * buffer->GetStride() ? // 1.5 + pixelmap->GetRowStride() / 2 : pixelmap->GetRowStride(); // 2 + int32_t pixelmapUvStride = static_cast((yuvInfo.uvStride > 0) ? + static_cast(yuvInfo.uvStride) : pixelmapStride); + int32_t pixelmapUvOffset = static_cast((yuvInfo.uvOffset > 0) ? + static_cast(yuvInfo.uvOffset) : pixelmapStride * pixelmap->GetHeight()); + int32_t pixelmapUvHeight = static_cast((yuvInfo.uvHeight > 0) ? + static_cast(yuvInfo.uvHeight) : (pixelmap->GetHeight() + 1) / 2); + + int32_t bufferUvStride = buffer->GetStride(); + int32_t bufferUvOffset = buffer->GetStride() * buffer->GetHeight(); + int32_t bufferUvHeight = (buffer->GetHeight() + 1) / 2; // 2 + void *planesInfoPtr = nullptr; + buffer->GetPlanesInfo(&planesInfoPtr); + auto planesInfo = static_cast(planesInfoPtr); + + if (planesInfo != nullptr) { + int idx = (pixelmap->GetPixelFormat() == OHOS::Media::PixelFormat::NV12) ? 
1 : 2; // 1 NV12, 2 NV21 + bufferUvStride = static_cast(planesInfo->planes[idx].columnStride); + bufferUvOffset = static_cast(planesInfo->planes[idx].offset); + if (bufferUvStride != 0) { + bufferUvHeight = static_cast(planesInfo->planes[idx].offset) / bufferUvStride; + } + } + int32_t yStride = std::min(pixelmapStride, buffer->GetStride()); + for (int i = 0; i < pixelmap->GetHeight(); ++i) { + if (memcpy_s(static_cast(buffer->GetVirAddr()) + i * buffer->GetStride(), yStride, + static_cast(pixelmap->GetWritablePixels()) + i * pixelmapStride, yStride) != EOK) { + VPE_LOGE("Failed to copy NV12 buffer to pixelmap!"); + return false; + } + } + + int32_t uvStride = std::min(pixelmapUvStride, bufferUvStride); + int32_t uvHeight = std::min(pixelmapUvHeight, bufferUvHeight); + for (int i = 0; i < uvHeight; ++i) { + if (memcpy_s(static_cast(buffer->GetVirAddr()) + bufferUvOffset + i * bufferUvStride, uvStride, + static_cast(pixelmap->GetWritablePixels()) + pixelmapUvOffset + i * pixelmapUvStride, + uvStride) != EOK) { + VPE_LOGE("Failed to copy NV12 buffer to pixelmap!"); + return false; + } + } + return true; +} + +bool VpeUtils::SetSurfaceBufferToPixelMap(const sptr& buffer, + std::shared_ptr& pixelmap) +{ + if (pixelmap->GetAllocatorType() == AllocatorType::DMA_ALLOC) { + return true; + } + return ConvertSurfaceBufferToPixelmap(buffer, pixelmap); +} diff --git a/framework/algorithm/contrast_enhancer/include/contrast_enhancer_base.h b/framework/algorithm/contrast_enhancer/include/contrast_enhancer_base.h new file mode 100644 index 0000000000000000000000000000000000000000..f702718cd83ca84224d633598e3eaad5d4c0ac0e --- /dev/null +++ b/framework/algorithm/contrast_enhancer/include/contrast_enhancer_base.h @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef CONTRAST_ENHANCER_BASE_H +#define CONTRAST_ENHANCER_BASE_H + +#include "algorithm_errors.h" +#include "refbase.h" +#include "surface_buffer.h" + +#include "contrast_enhancer_image.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +class ContrastEnhancerBase { +public: + ContrastEnhancerBase() = default; + virtual ~ContrastEnhancerBase() = default; + ContrastEnhancerBase(const ContrastEnhancerBase&) = delete; + ContrastEnhancerBase& operator=(const ContrastEnhancerBase&) = delete; + ContrastEnhancerBase(ContrastEnhancerBase&&) = delete; + ContrastEnhancerBase& operator=(ContrastEnhancerBase&&) = delete; + + virtual VPEAlgoErrCode Init() = 0; + virtual VPEAlgoErrCode Deinit() = 0; + virtual VPEAlgoErrCode SetParameter(const ContrastEnhancerParameters& parameter, int type, bool flag) = 0; + + virtual VPEAlgoErrCode GetRegionHist(const sptr& input) = 0; + virtual bool UpdateMetadataBasedOnLcd(OHOS::Rect rect, int lcdWidth, int lcdHeight, + sptr surfaceBuffer) = 0; + virtual bool UpdateMetadataBasedOnDetail(OHOS::Rect displayArea, OHOS::Rect curPixelmapArea, + OHOS::Rect completePixelmapArea, sptr surfaceBuffer, float fullRatio) = 0; +}; + +using ContrastEnhancerCreator = std::function()>; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif /* CONTRAST_ENHANCER_BASE_H */ diff --git a/framework/algorithm/contrast_enhancer/include/contrast_enhancer_capability.h b/framework/algorithm/contrast_enhancer/include/contrast_enhancer_capability.h new file mode 100644 index 0000000000000000000000000000000000000000..0641a6afb437954da487c34e3e1c2501a8c57776 --- /dev/null +++ b/framework/algorithm/contrast_enhancer/include/contrast_enhancer_capability.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_CONTRAST_ENHANCER_CAPABILITY_H +#define FRAMEWORK_ALGORITHM_CONTRAST_ENHANCER_CAPABILITY_H + +#include +#include + +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +struct ContrastEnhancerCapability { + std::vector levels; + uint32_t rank; + int32_t version; +}; + +using ContrastEnhancerCapabilitiesBuilder = std::function; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_CONTRAST_ENHANCER_CAPABILITY_H diff --git a/framework/algorithm/contrast_enhancer/include/contrast_enhancer_image_fwk.h b/framework/algorithm/contrast_enhancer/include/contrast_enhancer_image_fwk.h new file mode 100644 index 0000000000000000000000000000000000000000..86cc77850ceefc92ba682b23bddea4a8712e980e --- /dev/null +++ b/framework/algorithm/contrast_enhancer/include/contrast_enhancer_image_fwk.h @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef CONTRAST_ENHANCER_IMAGE_FWK_H +#define CONTRAST_ENHANCER_IMAGE_FWK_H + +#include + +#include "contrast_enhancer_image.h" +#include "contrast_enhancer_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class ContrastEnhancerImageFwk : public ContrastEnhancerImage { +public: + explicit ContrastEnhancerImageFwk(); + ~ContrastEnhancerImageFwk() override; + ContrastEnhancerImageFwk(const ContrastEnhancerImageFwk&) = delete; + ContrastEnhancerImageFwk& operator=(const ContrastEnhancerImageFwk&) = delete; + ContrastEnhancerImageFwk(ContrastEnhancerImageFwk&&) = delete; + ContrastEnhancerImageFwk& operator=(ContrastEnhancerImageFwk&&) = delete; + + VPEAlgoErrCode SetParameter(const ContrastEnhancerParameters& parameter) override; + VPEAlgoErrCode GetParameter(ContrastEnhancerParameters& parameter) const override; + VPEAlgoErrCode GetRegionHist(const sptr& input) override; + VPEAlgoErrCode UpdateMetadataBasedOnLcd(OHOS::Rect displayArea, int lcdWidth, int lcdHeight, + sptr surfaceBuffer) override; + VPEAlgoErrCode UpdateMetadataBasedOnDetail(OHOS::Rect displayArea, OHOS::Rect curPixelmapArea, + OHOS::Rect completePixelmapArea, sptr surfaceBuffer, float fullRatio) override; +private: + std::shared_ptr GetAlgorithm(int feature); + std::shared_ptr CreateAlgorithm(int feature); + bool IsValidProcessedObject(const sptr& buffer); + + ContrastEnhancerParameters parameter_{}; + mutable std::mutex lock_{}; + std::mutex getAlgoLock_{}; + std::unordered_map> algorithms_{}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // CONTRAST_ENHANCER_IMAGE_IMPL_H diff --git a/framework/algorithm/detail_enhancer/detail_enhancer_image_fwk.cpp b/framework/algorithm/detail_enhancer/detail_enhancer_image_fwk.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c0a796759093edd3a4d8ad0aff1b90f1b6c04df6 --- /dev/null +++ b/framework/algorithm/detail_enhancer/detail_enhancer_image_fwk.cpp @@ -0,0 +1,341 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "detail_enhancer_image_fwk.h" + +#include + +#include "detail_enhancer_common.h" +#include "extension_manager.h" +#include "native_buffer.h" +#include "surface_buffer.h" +#include "video_processing_client.h" +#include "vpe_log.h" +#include "vpe_trace.h" + +namespace { +enum RectLevelItem { + RECT_LEVEL_INVALID = -1, + RECT_MIN_WIDTH, + RECT_MAX_WIDTH, + RECT_MIN_HEIGHT, + RECT_MAX_HEIGHT, + RECT_LEVEL_NUM, +}; + +constexpr float EPSILON = 1e-6; // extremely small value +const int MAX_URL_LENGTH = 100; +const int SUPPORTED_MIN_WIDTH = 32; +const int SUPPORTED_MIN_HEIGHT = 32; +const int SUPPORTED_MAX_WIDTH = 8192; +const int SUPPORTED_MAX_HEIGHT = 8192; +const int TIMEOUT_THRESHOLD = 10; // 10 millisecond +const std::unordered_set SUPPORTED_FORMATS = { + OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888, // BGRA + OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, // RGBA + OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP, // NV12 + OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP, // NV21 + OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P, // YU12 + OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P, // YV12 + OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, // RGBA_1010102 +}; +const std::vector> SUPER_LEVEL_TARGET_RECT = { + {1, 1104, 1, 848}, + {1, 1104, 1, 1488}, + {1, 1488, 1, 1104}, + {1, 1872, 1, 1360}, +}; + +inline bool IsValidSurfaceBuffer(const OHOS::sptr& buffer) +{ + CHECK_AND_RETURN_RET_LOG(buffer != nullptr, false, "buffer is nullptr!!"); + return SUPPORTED_FORMATS.find(buffer->GetFormat()) != SUPPORTED_FORMATS.end() && + buffer->GetWidth() > SUPPORTED_MIN_WIDTH && buffer->GetHeight() > SUPPORTED_MIN_HEIGHT && + buffer->GetWidth() <= SUPPORTED_MAX_WIDTH && buffer->GetHeight() <= SUPPORTED_MAX_HEIGHT; +} + +std::atomic g_instanceId = -1; +std::timed_mutex g_externLock{}; +} + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +DetailEnhancerImageFwk::DetailEnhancerImageFwk(int type) +{ + type_ = (type >= IMAGE && type <= VIDEO) ? 
type : IMAGE; + Extension::ExtensionManager::GetInstance().IncreaseInstance(); +} + +DetailEnhancerImageFwk::~DetailEnhancerImageFwk() +{ + algorithms_.clear(); + Extension::ExtensionManager::GetInstance().DecreaseInstance(); +} + +std::shared_ptr DetailEnhancerImage::Create(int type) +{ + CHECK_AND_RETURN_RET_LOG(type <= VIDEO && type >= IMAGE, nullptr, "type is invalid!!"); + std::shared_ptr impl = std::make_shared(type); + CHECK_AND_RETURN_RET_LOG(impl != nullptr, nullptr, "failed to init DetailEnhancerImage"); + return impl; +} + +std::shared_ptr DetailEnhancerImageFwk::GetAlgorithm(int level) +{ + if (level < DETAIL_ENH_LEVEL_NONE || level > DETAIL_ENH_LEVEL_VIDEO) { + VPE_LOGE("Invalid level:%{public}d", level); + return nullptr; + } + std::lock_guard lock(lock_); + auto createdImpl = algorithms_.find(level); + if (createdImpl != algorithms_.end()) [[likely]] { + return createdImpl->second; + } + algorithms_[level] = CreateAlgorithm(level); + return algorithms_[level]; +} + +std::shared_ptr DetailEnhancerImageFwk::CreateAlgorithm(int level) +{ + auto& manager = Extension::ExtensionManager::GetInstance(); + VPE_SYNC_TRACE; + std::shared_ptr algoImpl = manager.CreateDetailEnhancer(level); + if (algoImpl == nullptr) { + VPE_LOGE("Extension create failed, get a empty impl, level: %{public}d", level); + return nullptr; + } + if (algoImpl->Init() != VPE_ALGO_ERR_OK) { + VPE_LOGE("Init failed, extension level: %{public}d", level); + return nullptr; + } + return algoImpl; +} + +VPEAlgoErrCode DetailEnhancerImageFwk::SetParameter(const DetailEnhancerParameters& parameter) +{ + CHECK_AND_RETURN_RET_LOG(parameter.level >= DETAIL_ENH_LEVEL_NONE && parameter.level <= DETAIL_ENH_LEVEL_HIGH && + parameter.uri.length() < MAX_URL_LENGTH, VPE_ALGO_ERR_INVALID_VAL, "Invalid parameter"); + std::lock_guard lock(lock_); + parameter_ = parameter; + parameterUpdated = true; + VPE_LOGI("DetailEnhancerImageFwk SetParameter Succeed"); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode DetailEnhancerImageFwk::GetParameter(DetailEnhancerParameters& parameter) const +{ + std::lock_guard lock(lock_); + parameter = parameter_; + VPE_LOGI("DetailEnhancerImageFwk SetParameter Succeed"); + return VPE_ALGO_ERR_OK; +} + +bool DetailEnhancerImageFwk::IsValidProcessedObject(const sptr& input, + const sptr& output) +{ + CHECK_AND_RETURN_RET_LOG((input != nullptr) && (output != nullptr), + false, "Input or output is nullptr"); + CHECK_AND_RETURN_RET_LOG(input->GetFormat() == output->GetFormat(), false, + "The input format and output format need to be consistent"); + CHECK_AND_RETURN_RET_LOG(IsValidSurfaceBuffer(input) && IsValidSurfaceBuffer(output), false, "Invalid buffer"); + return true; +} + +int DetailEnhancerImageFwk::EvaluateTargetLevel(const sptr& input, + const sptr& output, float widthRatio, float heightRatio) const +{ + CHECK_AND_RETURN_RET_LOG((input != nullptr) && (output != nullptr), false, "Input or output is nullptr"); + if (parameter_.level == DETAIL_ENH_LEVEL_HIGH) { + int inputW = input->GetWidth(); + int inputH = input->GetHeight(); + if (widthRatio < 1.0 && heightRatio < 1.0 && // 1.0 means zoom out + // 0.5 means rounding, 2 means two pixels + std::abs(static_cast(widthRatio * inputW + 0.5) - static_cast(heightRatio * inputW + 0.5)) <= 2 && + // 0.5 means rounding, 2 means two pixels + std::abs(static_cast(widthRatio * inputH + 0.5) - static_cast(heightRatio * inputH + 0.5)) <= 2) { + VPE_LOGI("Prioritize using extream vision algo when scaling down scenes"); + return DETAIL_ENH_LEVEL_HIGH; + } + return 
DETAIL_ENH_LEVEL_HIGH_AISR; + } + return parameter_.level; +} + +VPEAlgoErrCode DetailEnhancerImageFwk::ProcessVideo(const sptr& input, + const sptr& output, bool flag) +{ + auto algoImpl = GetAlgorithm(DETAIL_ENH_LEVEL_VIDEO); + if (algoImpl == nullptr) { + VPE_LOGE("Get Algorithm impl for video failed!"); + return VPE_ALGO_ERR_UNKNOWN; + } + if (parameterUpdated.load() && (algoImpl->SetParameter(parameter_, type_, flag) != VPE_ALGO_ERR_OK)) { + VPE_LOGE("set parameter failed!"); + return VPE_ALGO_ERR_UNKNOWN; + } else { + parameterUpdated = false; + } + if (algoImpl->Process(input, output) != VPE_ALGO_ERR_OK) { + VPE_LOGE("process video failed"); + return VPE_ALGO_ERR_UNKNOWN; + } + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode DetailEnhancerImageFwk::Process(const sptr& input, const sptr& output, + bool flag) +{ + CHECK_AND_RETURN_RET_LOG(IsValidProcessedObject(input, output), VPE_ALGO_ERR_INVALID_VAL, + "Invalid processd object"); + VPE_SYNC_TRACE; + if (parameter_.forceEve) { + auto algoImpl = GetAlgorithm(DETAIL_ENH_LEVEL_HIGH); + CHECK_AND_RETURN_RET_LOG(algoImpl != nullptr && algoImpl->SetParameter(parameter_, type_, flag) == + VPE_ALGO_ERR_OK, VPE_ALGO_ERR_UNKNOWN, "set parameter failed!"); + return algoImpl->Process(input, output); + } + if (type_ == VIDEO) { + return ProcessVideo(input, output, flag); + } + float widthRatio = static_cast(output->GetWidth()) / static_cast(input->GetWidth()); + float heightRatio = static_cast(output->GetHeight()) / static_cast(input->GetHeight()); + int targetLevel = EvaluateTargetLevel(input, output, widthRatio, heightRatio); + if (targetLevel < DETAIL_ENH_LEVEL_HIGH_AISR && + std::fabs(widthRatio - 1.0f) < EPSILON && std::fabs(heightRatio - 1.0f) < EPSILON) { + VPE_LOGI("The current scaling ratio is 1.0, and the algorithm is not AISR, so copy it directly."); + return (memcpy_s(output->GetVirAddr(), output->GetSize(), input->GetVirAddr(), input->GetSize()) == EOK) ? + VPE_ALGO_ERR_OK : VPE_ALGO_ERR_UNKNOWN; + } + bool processSuccessfully = false; + for (int level = targetLevel; level >= DETAIL_ENH_LEVEL_NONE; level--) { + auto algoImpl = GetAlgorithm(level); + if (algoImpl == nullptr) { + VPE_LOGE("Get Algorithm impl for %{public}d failed!", level); + continue; + } + parameter_.level = static_cast((level == DETAIL_ENH_LEVEL_HIGH_AISR) ? + DETAIL_ENH_LEVEL_HIGH : level); // map level + if (algoImpl->SetParameter(parameter_, type_, flag) != VPE_ALGO_ERR_OK) { + VPE_LOGE("set parameter failed!"); + return VPE_ALGO_ERR_UNKNOWN; + } + if (algoImpl->Process(input, output) == VPE_ALGO_ERR_OK) { + processSuccessfully = true; + break; + } else if (level == DETAIL_ENH_LEVEL_HIGH_AISR) { + VPE_LOGD("AISR processed failed, try to process by EVE"); + } else if (level > DETAIL_ENH_LEVEL_NONE) { + VPE_LOGW("Failed to process with level %{public}d", level); + } else { + VPE_LOGE("Failed to process with detail enhancer"); + return VPE_ALGO_ERR_UNKNOWN; + } + } + return processSuccessfully ? 
VPE_ALGO_ERR_OK : VPE_ALGO_ERR_INVALID_VAL; +} + +int32_t DetailEnhancerCreate(int32_t* instance) +{ + CHECK_AND_RETURN_RET_LOG(g_externLock.try_lock_for(std::chrono::milliseconds(TIMEOUT_THRESHOLD)), + VPE_ALGO_ERR_INVALID_VAL, "get lock timeout"); + if (instance == nullptr) { + VPE_LOGE("invalid instance"); + g_externLock.unlock(); + return VPE_ALGO_ERR_INVALID_VAL; + } + if (g_instanceId != -1) { + // if there is an instance, return it + *instance = g_instanceId; + g_externLock.unlock(); + return VPE_ALGO_ERR_OK; + } + auto detailEnh = DetailEnhancerImage::Create(); + if (detailEnh == nullptr) { + VPE_LOGE("cannot create instance"); + g_externLock.unlock(); + return VPE_ALGO_ERR_INVALID_VAL; + } + Extension::ExtensionManager::InstanceVariableType instanceVar { detailEnh }; + int32_t newId = Extension::ExtensionManager::GetInstance().NewInstanceId(instanceVar); + if (newId == -1) { + VPE_LOGE("cannot create more instance"); + g_externLock.unlock(); + return VPE_ALGO_ERR_NO_MEMORY; + } + *instance = newId; + g_instanceId = newId; + g_externLock.unlock(); + return VPE_ALGO_ERR_OK; +} + +sptr CreateSurfaceBufFromNativeWindow(OHNativeWindowBuffer* image) +{ + OH_NativeBuffer* imageNativeBuffer = nullptr; + CHECK_AND_RETURN_RET_LOG(OH_NativeBuffer_FromNativeWindowBuffer(image, &imageNativeBuffer) == GSERROR_OK, + nullptr, "invalid input or output image"); + sptr imageSurfaceBuffer(SurfaceBuffer::NativeBufferToSurfaceBuffer(imageNativeBuffer)); + return imageSurfaceBuffer; +} + +int32_t DetailEnhancerProcessImage(int32_t instance, OHNativeWindowBuffer* inputImage, + OHNativeWindowBuffer* outputImage, int32_t level) +{ + CHECK_AND_RETURN_RET_LOG(g_externLock.try_lock_for(std::chrono::milliseconds(TIMEOUT_THRESHOLD)), + VPE_ALGO_ERR_INVALID_VAL, "get lock timeout"); + if (inputImage == nullptr || outputImage == nullptr) { + VPE_LOGE("invalid parameters"); + g_externLock.unlock(); + return VPE_ALGO_ERR_INVALID_VAL; + } + auto someInstance = Extension::ExtensionManager::GetInstance().GetInstance(instance); + if (someInstance == std::nullopt) { + VPE_LOGE("invalid instance"); + g_externLock.unlock(); + return VPE_ALGO_ERR_INVALID_VAL; + } + VPEAlgoErrCode ret = VPE_ALGO_ERR_INVALID_VAL; + auto visitFunc = [inputImage, outputImage, &ret, &level](auto&& var) { + using VarType = std::decay_t; + if constexpr (std::is_same_v>) { + sptr inputImageSurfaceBuffer = CreateSurfaceBufFromNativeWindow(inputImage); + sptr outputImageSurfaceBuffer = CreateSurfaceBufFromNativeWindow(outputImage); + DetailEnhancerParameters param { + .uri = "", + .level = static_cast(level), + }; + var->SetParameter(param); + ret = var->Process(inputImageSurfaceBuffer, outputImageSurfaceBuffer); + } else { + VPE_LOGE("instance may be miss used"); + } + }; + std::visit(visitFunc, *someInstance); + g_externLock.unlock(); + return ret; +} + +int32_t DetailEnhancerDestroy(int32_t* instance) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr, VPE_ALGO_ERR_INVALID_VAL, "instance is null"); + int ret = Extension::ExtensionManager::GetInstance().RemoveInstanceReference(*instance); + if (ret == VPE_ALGO_ERR_OK) { + g_instanceId = -1; + } + return ret; +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/detail_enhancer/include/detail_enhancer_base.h b/framework/algorithm/detail_enhancer/include/detail_enhancer_base.h new file mode 100644 index 0000000000000000000000000000000000000000..52953e9ec4b5781d639113340df8157d6a86d088 --- /dev/null +++ 
b/framework/algorithm/detail_enhancer/include/detail_enhancer_base.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DETAIL_ENHANCER_BASE_H +#define DETAIL_ENHANCER_BASE_H + +#include "algorithm_errors.h" +#include "detail_enhancer_common.h" +#include "refbase.h" +#include "surface_buffer.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum DetailEnhancerHighLevel { + // The high level adopts downshift logic and uses AISR by default. If AISR processing fails, downshift to EVE. + // Do not modify the high-level logical sequence + DETAIL_ENH_LEVEL_HIGH_EVE = DETAIL_ENH_LEVEL_HIGH, + DETAIL_ENH_LEVEL_HIGH_AISR = DETAIL_ENH_LEVEL_HIGH + 1, + DETAIL_ENH_LEVEL_VIDEO = DETAIL_ENH_LEVEL_HIGH + 2, +}; + +class DetailEnhancerBase { +public: + enum DetailEnhancerType { + DETAIL_ENH_TYPE_IMAGE = 0, + DETAIL_ENH_TYPE_VIDEO, + }; + + DetailEnhancerBase() = default; + virtual ~DetailEnhancerBase() = default; + DetailEnhancerBase(const DetailEnhancerBase&) = delete; + DetailEnhancerBase& operator=(const DetailEnhancerBase&) = delete; + DetailEnhancerBase(DetailEnhancerBase&&) = delete; + DetailEnhancerBase& operator=(DetailEnhancerBase&&) = delete; + + virtual VPEAlgoErrCode Init() = 0; + virtual VPEAlgoErrCode Deinit() = 0; + virtual VPEAlgoErrCode SetParameter(const DetailEnhancerParameters& parameter, int type, bool flag) = 0; + virtual VPEAlgoErrCode Process(const sptr& input, const sptr& output) = 0; +}; + +using DetailEnhancerCreator = std::function()>; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif /* DETAIL_ENHANCER_BASE_H */ diff --git a/framework/algorithm/detail_enhancer/include/detail_enhancer_capability.h b/framework/algorithm/detail_enhancer/include/detail_enhancer_capability.h new file mode 100644 index 0000000000000000000000000000000000000000..186a8d56fb5aa6871ea5464b351934907307639e --- /dev/null +++ b/framework/algorithm/detail_enhancer/include/detail_enhancer_capability.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef FRAMEWORK_ALGORITHM_DETAIL_ENHANCER_CAPABILITY_H +#define FRAMEWORK_ALGORITHM_DETAIL_ENHANCER_CAPABILITY_H + +#include +#include + +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +struct DetailEnhancerCapability { + std::vector levels; + uint32_t rank; + int32_t version; +}; + +using DetailEnhancerCapabilitiesBuilder = std::function; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_DETAIL_ENHANCER_CAPABILITY_H diff --git a/framework/algorithm/detail_enhancer/include/detail_enhancer_image_fwk.h b/framework/algorithm/detail_enhancer/include/detail_enhancer_image_fwk.h new file mode 100644 index 0000000000000000000000000000000000000000..04572ff6379c6f6d3ecfd90c569e06f04df9b94b --- /dev/null +++ b/framework/algorithm/detail_enhancer/include/detail_enhancer_image_fwk.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DETAIL_ENHANCER_IMAGE_FWK_H +#define DETAIL_ENHANCER_IMAGE_FWK_H + +#include + +#include "detail_enhancer_image.h" +#include "detail_enhancer_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class DetailEnhancerImageFwk : public DetailEnhancerImage { +public: + explicit DetailEnhancerImageFwk(int type); + ~DetailEnhancerImageFwk() override; + DetailEnhancerImageFwk(const DetailEnhancerImageFwk&) = delete; + DetailEnhancerImageFwk& operator=(const DetailEnhancerImageFwk&) = delete; + DetailEnhancerImageFwk(DetailEnhancerImageFwk&&) = delete; + DetailEnhancerImageFwk& operator=(DetailEnhancerImageFwk&&) = delete; + + VPEAlgoErrCode SetParameter(const DetailEnhancerParameters& parameter) override; + VPEAlgoErrCode GetParameter(DetailEnhancerParameters& parameter) const override; + VPEAlgoErrCode Process(const sptr& input, const sptr& output, bool flag) override; + +private: + std::shared_ptr GetAlgorithm(int feature); + std::shared_ptr CreateAlgorithm(int feature); + bool IsValidProcessedObject(const sptr& input, const sptr& output); + int EvaluateTargetLevel(const sptr& input, const sptr& output, + float widthRatio, float heightRatio) const; + VPEAlgoErrCode ProcessVideo(const sptr& input, const sptr& output, bool flag); + + DetailEnhancerParameters parameter_{}; + mutable std::mutex lock_{}; + std::unordered_map> algorithms_{}; + int type_; + std::atomic parameterUpdated{}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // DETAIL_ENHANCER_IMAGE_IMPL_H diff --git a/framework/algorithm/detail_enhancer_video/detail_enhancer_video_fwk.cpp b/framework/algorithm/detail_enhancer_video/detail_enhancer_video_fwk.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c7c90fc700dd82817f281de17dec8f43a64d6d8e --- /dev/null +++ b/framework/algorithm/detail_enhancer_video/detail_enhancer_video_fwk.cpp @@ -0,0 +1,206 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "detail_enhancer_video_fwk.h" + +#include + +#include "vpe_log.h" +#include "vpe_trace.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; +using namespace std::chrono_literals; + +namespace { +constexpr int MAX_TARGET_WIDTH = 2000; +constexpr int MAX_TARGET_HEIGHT = 2000; +} // namespace + +std::shared_ptr DetailEnhancerVideoFwk::Create() +{ + auto obj = std::make_shared(VIDEO_TYPE_DETAIL_ENHANCER); + CHECK_AND_RETURN_RET_LOG(obj != nullptr, nullptr, "Failed to create detail enhancer!"); + CHECK_AND_RETURN_RET_LOG(obj->Initialize() == VPE_ALGO_ERR_OK, nullptr, "Failed to initialize detail enhancer!"); + return obj; +} + +VPEAlgoErrCode DetailEnhancerVideoFwk::SetParameter(const Format& parameter) +{ + std::function setters[] = { + [this] (const Format& parameter) { return SetLevel(parameter); }, + [this] (const Format& parameter) { return SetTargetSize(parameter); }, + [this] (const Format& parameter) { return SetAutoDownshift(parameter); }, + }; + + CHECK_AND_RETURN_RET_LOG(IsInitialized(), VPE_ALGO_ERR_INVALID_OPERATION, "NOT initialized!"); + + std::lock_guard lock(lock_); + int setCount = 0; + for (auto& setter : setters) { + int err = setter(parameter); + if (err == PARAM_ERR_INVALID) { + return VPE_ALGO_ERR_INVALID_VAL; + } + setCount += err; + } + CHECK_AND_RETURN_RET_LOG(setCount > 0, VPE_ALGO_ERR_INVALID_VAL, "Invalid input: NO valid parameters!"); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode DetailEnhancerVideoFwk::GetParameter([[maybe_unused]] Format& parameter) +{ + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode DetailEnhancerVideoFwk::OnInitialize() +{ + detailEnh_ = DetailEnhancerImage::Create(VIDEO); + CHECK_AND_RETURN_RET_LOG(detailEnh_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "Failed to create DetailEnhancer!"); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode DetailEnhancerVideoFwk::OnDeinitialize() +{ + detailEnh_ = nullptr; + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode DetailEnhancerVideoFwk::Process(const sptr& sourceImage, + sptr& destinationImage) +{ + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr, VPE_ALGO_ERR_INVALID_VAL, + "Invalid input: source or destination image is null!"); + CHECK_AND_RETURN_RET_LOG(IsInitialized(), VPE_ALGO_ERR_INVALID_OPERATION, "NOT initialized!"); + auto ret = detailEnh_->Process(sourceImage, destinationImage); + VPE_LOGD("scale %{public}dx%{public}d -> %{public}dx%{public}d ret:%{public}d", + sourceImage->GetWidth(), sourceImage->GetHeight(), + destinationImage->GetWidth(), destinationImage->GetHeight(), ret); + std::lock_guard lock(lock_); + if (ret == VPE_ALGO_ERR_OK && (lastEffectiveLevel_ != level_ || + lastEffectiveSize_.width != destinationImage->GetWidth() || + lastEffectiveSize_.height != destinationImage->GetHeight())) { + VPE_LOGD("level:%{public}d->%{public}d size:%{public}dx%{public}d->%{public}dx%{public}d", + lastEffectiveLevel_, level_, lastEffectiveSize_.width, lastEffectiveSize_.height, + 
destinationImage->GetWidth(), destinationImage->GetHeight()); + Format fmt; + lastEffectiveLevel_ = level_; + fmt.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, lastEffectiveLevel_); + lastEffectiveSize_.width = destinationImage->GetWidth(); + lastEffectiveSize_.height = destinationImage->GetHeight(); + fmt.PutBuffer(ParameterKey::DETAIL_ENHANCER_TARGET_SIZE, reinterpret_cast(&lastEffectiveSize_), + sizeof(lastEffectiveSize_)); + OnOutputFormatChanged(fmt); + } + return ret; +} + +bool DetailEnhancerVideoFwk::IsProducerSurfaceValid([[maybe_unused]] const sptr& surface) +{ + // Check resolution for detail enhancer is valid or not + return true; +} + +VPEAlgoErrCode DetailEnhancerVideoFwk::UpdateRequestCfg(const sptr& surface, BufferRequestConfig& requestCfg) +{ + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VPE_ALGO_ERR_INVALID_VAL, "surface is null!"); + + // If the target size is set, the target size takes precedence. + std::lock_guard lock(lock_); + if (size_.width != 0 && size_.height != 0) { + requestCfg.width = size_.width; + requestCfg.height = size_.height; + } else { + // Update buffer request configuration, use default resolution of output surface as the resize destination + // resolution when the request resolution is invalid. + requestCfg.width = surface->GetRequestWidth(); + requestCfg.height = surface->GetRequestHeight(); + if (requestCfg.width == 0 || requestCfg.height == 0) { + requestCfg.width = surface->GetDefaultWidth(); + requestCfg.height = surface->GetDefaultHeight(); + } + } + return VPE_ALGO_ERR_OK; +} + +void DetailEnhancerVideoFwk::UpdateRequestCfg(const sptr& consumerBuffer, + BufferRequestConfig& requestCfg) +{ + CHECK_AND_RETURN_LOG(consumerBuffer != nullptr, "surface buffer is null!"); + + if (size_.width != 0 && size_.height != 0) { + requestCfg.width = size_.width; + requestCfg.height = size_.height; + } else { + requestCfg.width = consumerBuffer->GetWidth(); + requestCfg.height = consumerBuffer->GetHeight(); + } + requestCfg.format = consumerBuffer->GetFormat(); +} + +DetailEnhancerVideoFwk::ParamError DetailEnhancerVideoFwk::SetLevel(const Format& parameter) +{ + int level; + if (!parameter.GetIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, level)) { + return PARAM_ERR_NOT_FOUND; + } + CHECK_AND_RETURN_RET_LOG(level >= DETAIL_ENHANCER_LEVEL_NONE && level <= DETAIL_ENHANCER_LEVEL_HIGH, + PARAM_ERR_INVALID, "Invalid input: level=%{public}d(Expected:[%{public}d,%{public}d])!", + level, DETAIL_ENHANCER_LEVEL_NONE, DETAIL_ENHANCER_LEVEL_HIGH); + VPE_LOGI("level:%{public}d->%{public}d", level_, level); + level_ = static_cast(level); + DetailEnhancerParameters param{}; + param.level = static_cast(level_); + CHECK_AND_RETURN_RET_LOG(detailEnh_->SetParameter(param) == VPE_ALGO_ERR_OK, PARAM_ERR_INVALID, + "Failed to set parameter(level:%{public}d) to detail enhancer!", param.level); + return PARAM_ERR_OK; +} + +DetailEnhancerVideoFwk::ParamError DetailEnhancerVideoFwk::SetTargetSize(const Format& parameter) +{ + size_t addrSize; + uint8_t* addr = nullptr; + if (!parameter.GetBuffer(ParameterKey::DETAIL_ENHANCER_TARGET_SIZE, &addr, addrSize)) { + return PARAM_ERR_NOT_FOUND; + } + CHECK_AND_RETURN_RET_LOG(addr != nullptr && addrSize == sizeof(VpeBufferSize), PARAM_ERR_INVALID, + "Invalid input: addr is null or addrSize=%{public}zu(Expected:%{public}zu)!", + addrSize, sizeof(VpeBufferSize)); + auto size = reinterpret_cast(addr); + CHECK_AND_RETURN_RET_LOG(size->width > 0 && size->width <= MAX_TARGET_WIDTH && + size->height > 0 && size->height <= 
MAX_TARGET_HEIGHT, PARAM_ERR_INVALID, + "Invalid input: target buffer size width=%{public}d(Expected:(0,%{public}d])" + " height=%{public}d(Expected:(0,%{public}d])!", + size->width, MAX_TARGET_WIDTH, size->height, MAX_TARGET_HEIGHT); + VPE_LOGI("target size:%{public}dx%{public}d->%{public}dx%{public}d", + size_.width, size_.height, size->width, size->height); + size_ = *size; + + RefreshBuffers(); + return PARAM_ERR_OK; +} + +DetailEnhancerVideoFwk::ParamError DetailEnhancerVideoFwk::SetAutoDownshift(const Format& parameter) +{ + int autoDownshift; + if (!parameter.GetIntValue(ParameterKey::DETAIL_ENHANCER_AUTO_DOWNSHIFT, autoDownshift)) { + return PARAM_ERR_NOT_FOUND; + } + CHECK_AND_RETURN_RET_LOG(autoDownshift == 0 || autoDownshift == 1, PARAM_ERR_INVALID, + "Invalid input: autoDownshift=%{public}d(Expected: 0 or 1)!", autoDownshift); + VPE_LOGI("auto downshift:%{public}d->%{public}d", isAutoDownshift_, (autoDownshift == 1)); + isAutoDownshift_ = (autoDownshift == 1); + return PARAM_ERR_OK; +} diff --git a/framework/algorithm/detail_enhancer_video/detail_enhancer_video_impl.cpp b/framework/algorithm/detail_enhancer_video/detail_enhancer_video_impl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b6978b4473aeb867ac4be43177c6686b727d1885 --- /dev/null +++ b/framework/algorithm/detail_enhancer_video/detail_enhancer_video_impl.cpp @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "detail_enhancer_video_impl.h" + +#include "vpe_log.h" +#include "vpe_trace.h" + +constexpr int MAX_URL_LENGTH = 100; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +std::shared_ptr DetailEnhancerVideo::Create() +{ + std::shared_ptr impl = std::make_shared(); + int32_t ret = impl->Init(); + CHECK_AND_RETURN_RET_LOG(ret == static_cast(VPE_ALGO_ERR_OK), nullptr, + "failed to init DetailEnhancerVideoImpl"); + return impl; +} + +VPEAlgoErrCode DetailEnhancerVideoImpl::RegisterCallback(const std::shared_ptr& callback) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VPE_ALGO_ERR_INIT_FAILED, "Initialization was NOT successful!"); + return detailEnhancerVideo_->RegisterCallback(callback); +} + +VPEAlgoErrCode DetailEnhancerVideoImpl::SetOutputSurface(const sptr& surface) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VPE_ALGO_ERR_INIT_FAILED, "Initialization was NOT successful!"); + return detailEnhancerVideo_->SetOutputSurface(surface); +} + +sptr DetailEnhancerVideoImpl::GetInputSurface() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), nullptr, "Initialization was NOT successful!"); + return detailEnhancerVideo_->GetInputSurface(); +} + +VPEAlgoErrCode DetailEnhancerVideoImpl::SetParameter(const DetailEnhancerParameters& parameter, SourceType type) +{ + CHECK_AND_RETURN_RET_LOG(parameter.level >= DETAIL_ENH_LEVEL_NONE && parameter.level <= DETAIL_ENH_LEVEL_HIGH && + parameter.uri.length() < MAX_URL_LENGTH, VPE_ALGO_ERR_INVALID_VAL, "Invalid algo level"); + CHECK_AND_RETURN_RET_LOG(type == VIDEO, VPE_ALGO_ERR_INVALID_VAL, "Invalid source type"); + + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VPE_ALGO_ERR_INIT_FAILED, "Initialization was NOT successful!"); + + Format fmt; + CHECK_AND_RETURN_RET_LOG(fmt.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, parameter.level), + VPE_ALGO_ERR_INVALID_VAL, "Failed to put level(%{public}d) to format!", parameter.level); + return detailEnhancerVideo_->SetParameter(fmt); +} + +VPEAlgoErrCode DetailEnhancerVideoImpl::Start() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VPE_ALGO_ERR_INIT_FAILED, "Initialization was NOT successful!"); + return detailEnhancerVideo_->Start(); +} + +VPEAlgoErrCode DetailEnhancerVideoImpl::Stop() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VPE_ALGO_ERR_INIT_FAILED, "Initialization was NOT successful!"); + return detailEnhancerVideo_->Stop(); +} + +VPEAlgoErrCode DetailEnhancerVideoImpl::RenderOutputBuffer([[maybe_unused]] uint32_t index) +{ + return VPE_ALGO_ERR_OK; +} + +int32_t DetailEnhancerVideoImpl::Init() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VPE_ALGO_ERR_INVALID_STATE, "Already initialized!"); + detailEnhancerVideo_ = DetailEnhancerVideoFwk::Create(); + CHECK_AND_RETURN_RET_LOG(detailEnhancerVideo_ != nullptr, VPE_ALGO_ERR_UNKNOWN, + "Failed to create video detail enhancer!"); + isInitialized_ = true; + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode DetailEnhancerVideoImpl::NotifyEos() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VPE_ALGO_ERR_INIT_FAILED, "Initialization was NOT successful!"); + return detailEnhancerVideo_->NotifyEos(); +} + +VPEAlgoErrCode DetailEnhancerVideoImpl::ReleaseOutputBuffer(uint32_t index, bool render) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VPE_ALGO_ERR_INIT_FAILED, "Initialization was NOT successful!"); + return detailEnhancerVideo_->ReleaseOutputBuffer(index, render); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // 
namespace OHOS diff --git a/framework/algorithm/detail_enhancer_video/include/detail_enhancer_video_fwk.h b/framework/algorithm/detail_enhancer_video/include/detail_enhancer_video_fwk.h new file mode 100644 index 0000000000000000000000000000000000000000..85e3274bded95e2c51e3abaaf09972e0fc2c1381 --- /dev/null +++ b/framework/algorithm/detail_enhancer_video/include/detail_enhancer_video_fwk.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DETAIL_ENHANCER_VIDEO_FWK_H +#define DETAIL_ENHANCER_VIDEO_FWK_H + +#include +#include + +#include "algorithm_video_impl.h" +#include "detail_enhancer_image_fwk.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class DetailEnhancerVideoFwk : public VpeVideoImpl { +public: + static std::shared_ptr Create(); + + explicit DetailEnhancerVideoFwk(uint32_t type) : VpeVideoImpl(type) {} + virtual ~DetailEnhancerVideoFwk() = default; + DetailEnhancerVideoFwk(const DetailEnhancerVideoFwk&) = delete; + DetailEnhancerVideoFwk& operator=(const DetailEnhancerVideoFwk&) = delete; + DetailEnhancerVideoFwk(DetailEnhancerVideoFwk&&) = delete; + DetailEnhancerVideoFwk& operator=(DetailEnhancerVideoFwk&&) = delete; + + VPEAlgoErrCode SetParameter(const Format& parameter) final; + VPEAlgoErrCode GetParameter(Format& parameter) final; + +protected: + VPEAlgoErrCode OnInitialize() final; + VPEAlgoErrCode OnDeinitialize() final; + VPEAlgoErrCode Process(const sptr& sourceImage, sptr& destinationImage) final; + bool IsProducerSurfaceValid(const sptr& surface) final; + VPEAlgoErrCode UpdateRequestCfg(const sptr& surface, BufferRequestConfig& requestCfg) final; + void UpdateRequestCfg(const sptr& consumerBuffer, BufferRequestConfig& requestCfg) final; + +private: + enum ParamError { + PARAM_ERR_INVALID = -1, + PARAM_ERR_NOT_FOUND = 0, + PARAM_ERR_OK = 1, + }; + + ParamError SetLevel(const Format& parameter); + ParamError SetTargetSize(const Format& parameter); + ParamError SetAutoDownshift(const Format& parameter); + + std::mutex lock_{}; + // Guarded by lock_ begin + DetailEnhancerQualityLevel level_{DETAIL_ENHANCER_LEVEL_LOW}; + VpeBufferSize size_{}; + DetailEnhancerQualityLevel lastEffectiveLevel_{}; + VpeBufferSize lastEffectiveSize_{}; + bool isAutoDownshift_{true}; + // Guarded by lock_ end + + std::shared_ptr detailEnh_{}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // DETAIL_ENHANCER_VIDEO_FWK_H diff --git a/framework/algorithm/detail_enhancer_video/include/detail_enhancer_video_impl.h b/framework/algorithm/detail_enhancer_video/include/detail_enhancer_video_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..56c549c154c8a2c661d69c0d4e0b76e368af45a7 --- /dev/null +++ b/framework/algorithm/detail_enhancer_video/include/detail_enhancer_video_impl.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. 
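The three private setters declared above map one-to-one onto the keys accepted by DetailEnhancerVideoFwk::SetParameter(). A caller-side sketch of packing them into a Format follows; it assumes the same includes as detail_enhancer_video_fwk.cpp and that Format::PutBuffer() takes a uint8_t pointer (the cast target is not visible in the hunk).

// Sketch only: fill the three recognized detail-enhancer keys.
Format BuildDetailEnhancerFormat()
{
    Format fmt;

    // Quality level: an int within [DETAIL_ENHANCER_LEVEL_NONE, DETAIL_ENHANCER_LEVEL_HIGH].
    fmt.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH);

    // Target size: a VpeBufferSize passed as a raw buffer; width and height must be in (0, 2000].
    VpeBufferSize size {};
    size.width = 1920;
    size.height = 1080;
    fmt.PutBuffer(ParameterKey::DETAIL_ENHANCER_TARGET_SIZE,
        reinterpret_cast<uint8_t*>(&size), sizeof(size));

    // Auto downshift: 0 or 1.
    fmt.PutIntValue(ParameterKey::DETAIL_ENHANCER_AUTO_DOWNSHIFT, 1);
    return fmt;
}

A Format built this way is what SetParameter() iterates over with its setters array: a key that is absent is simply skipped, while a present but out-of-range key fails the whole call.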
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DETAIL_ENHANCER_VIDEO_IMPL_H +#define DETAIL_ENHANCER_VIDEO_IMPL_H + +#include +#include +#include + +#include "algorithm_video_common.h" +#include "detail_enhancer_video.h" +#include "detail_enhancer_video_fwk.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class DetailEnhancerVideoImpl : public DetailEnhancerVideo { +public: + DetailEnhancerVideoImpl() = default; + ~DetailEnhancerVideoImpl() = default; + + int32_t Init(); + + VPEAlgoErrCode RegisterCallback(const std::shared_ptr& callback) override; + VPEAlgoErrCode SetOutputSurface(const sptr& surface) override; + sptr GetInputSurface() override; + VPEAlgoErrCode SetParameter(const DetailEnhancerParameters& parameter, SourceType type) override; + VPEAlgoErrCode Start() override; + VPEAlgoErrCode Stop() override; + VPEAlgoErrCode RenderOutputBuffer(uint32_t index) override; + VPEAlgoErrCode ReleaseOutputBuffer(uint32_t index, bool render) override; + + VPEAlgoErrCode NotifyEos() override; + +private: + std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::shared_ptr detailEnhancerVideo_{}; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // DETAIL_ENHANCER_VIDEO_IMPL_H diff --git a/framework/algorithm/extension_manager/extension_manager.cpp b/framework/algorithm/extension_manager/extension_manager.cpp new file mode 100644 index 0000000000000000000000000000000000000000..29ea8e2d91480c568e6a04930d8659e3dd5fcb68 --- /dev/null +++ b/framework/algorithm/extension_manager/extension_manager.cpp @@ -0,0 +1,700 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
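DetailEnhancerVideoImpl is a thin guard around DetailEnhancerVideoFwk: every call checks isInitialized_ and then forwards. A typical surface-to-surface session, in the order the implementation expects, is sketched below; the callback interface type is not part of this hunk, so it is left as a template parameter, and the buffer flow between surfaces is only indicated in comments.

// Sketch only: one detail-enhancement session over surfaces.
template <typename CallbackT>
VPEAlgoErrCode RunDetailEnhancerSession(const std::shared_ptr<CallbackT>& callback,
    const sptr<Surface>& appOutputSurface)
{
    std::shared_ptr<DetailEnhancerVideo> enhancer = DetailEnhancerVideo::Create();
    CHECK_AND_RETURN_RET_LOG(enhancer != nullptr, VPE_ALGO_ERR_UNKNOWN, "Failed to create detail enhancer!");

    enhancer->RegisterCallback(callback);           // output-available / format-changed events
    enhancer->SetOutputSurface(appOutputSurface);   // enhanced frames are rendered here
    sptr<Surface> producerSurface = enhancer->GetInputSurface();
    (void)producerSurface;                          // handed to the decoder/camera that produces frames

    DetailEnhancerParameters param {};
    param.level = DETAIL_ENH_LEVEL_HIGH;            // validated against [DETAIL_ENH_LEVEL_NONE, DETAIL_ENH_LEVEL_HIGH]
    CHECK_AND_RETURN_RET_LOG(enhancer->SetParameter(param, VIDEO) == VPE_ALGO_ERR_OK,
        VPE_ALGO_ERR_INVALID_VAL, "Failed to set parameter!");

    enhancer->Start();
    // ... frames queued on producerSurface come back enhanced on appOutputSurface ...
    enhancer->NotifyEos();
    return enhancer->Stop();
}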
+ */ + +#include "extension_manager.h" +#include +#include +#include +#include +#include +#include "static_extension_list.h" +#include "vpe_log.h" +namespace { + using LibFunctionGetRegisters = std::unordered_map* (*)(); + LibFunctionGetRegisters GetRegisterExtensionFuncs{nullptr}; + LibFunctionGetRegisters GetRegisterMetdataGenExtensionFuncs{nullptr}; + LibFunctionGetRegisters GetRegisterComposeExtensionFuncs{nullptr}; + LibFunctionGetRegisters GetRegisterDecomposeExtensionFuncs{nullptr}; + using LibFunctionGetRegisterFunction = OHOS::Media::VideoProcessingEngine::Extension::RegisterExtensionFunc (*)(); + LibFunctionGetRegisterFunction GetRegisterVRRExtensionFuncs{nullptr}; + void* g_algoHandle{nullptr}; +} // namespace + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { + +ExtensionManager& ExtensionManager::GetInstance() +{ + static ExtensionManager instance; + return instance; +} + +ExtensionManager::ExtensionManager() +{ + (void)Init(); + g_algoHandle = dlopen("libvideoprocessingengine_ext.z.so", RTLD_NOW); +} + +ExtensionManager::~ExtensionManager() +{ + if (g_algoHandle != nullptr) { + dlclose(g_algoHandle); + g_algoHandle = nullptr; + } +} + +VPEAlgoErrCode ExtensionManager::Init() +{ + if (initialized_) { + return VPE_ALGO_ERR_OK; + } + initialized_ = true; + return VPE_ALGO_ERR_OK; +} + +void ExtensionManager::IncreaseInstance() +{ + std::lock_guard lock(instanceCountMtx_); + if (usedInstance_ == 0 && g_algoHandle == nullptr) { + g_algoHandle = dlopen("libvideoprocessingengine_ext.z.so", RTLD_NOW); + } + usedInstance_++; +} + +void ExtensionManager::DecreaseInstance() +{ + std::lock_guard lock(instanceCountMtx_); + usedInstance_--; + if ((usedInstance_ == 0) && (g_algoHandle != nullptr)) { + dlclose(g_algoHandle); + g_algoHandle = nullptr; + } +} + +bool ExtensionManager::IsColorSpaceConversionSupported(const FrameInfo &inputInfo, const FrameInfo &outputInfo) const +{ + if (!initialized_) { + return false; + } + return FindColorSpaceConverterExtension(inputInfo, outputInfo) == nullptr; +} + +std::shared_ptr ExtensionManager::CreateColorSpaceConverter(const FrameInfo &inputInfo, + const FrameInfo &outputInfo, Extension::ExtensionInfo &extensionInfo) const +{ + CHECK_AND_RETURN_RET_LOG(initialized_ == true, nullptr, "Not initialized"); + auto extension = FindColorSpaceConverterExtension(inputInfo, outputInfo); + CHECK_AND_RETURN_RET_LOG(extension != nullptr, nullptr, "Create extension failed, get an empty extension"); + auto impl = extension->creator(); + CHECK_AND_RETURN_RET_LOG(impl != nullptr, nullptr, + "Call extension creator failed, return a empty impl, extension: %{public}s", extension->info.name.c_str()); + extensionInfo = extension->info; + return impl; +} + +std::shared_ptr ExtensionManager::CreateVideoRefreshRatePredictor() const +{ + std::shared_ptr extension; + CHECK_AND_RETURN_RET_LOG(g_algoHandle != nullptr, {}, "dlopen ext fail!"); + GetRegisterVRRExtensionFuncs = reinterpret_cast + (dlsym(g_algoHandle, "GetRegisterVRRExtensionFuncs")); + CHECK_AND_RETURN_RET_LOG(GetRegisterVRRExtensionFuncs != nullptr, {}, "dlsym Get VRR Extension fail!"); + + auto registerFunctionPtr = GetRegisterVRRExtensionFuncs(); + CHECK_AND_RETURN_RET_LOG(registerFunctionPtr != nullptr, {}, "get GetRegisterVRRExtensionFuncs fail!!"); + registerFunctionPtr(reinterpret_cast(&extension)); + std::shared_ptr vrrExtension = + std::static_pointer_cast(extension); + auto impl = vrrExtension->creator(); + return impl; +} + +ColorSpaceConverterDisplaySet 
ExtensionManager::CreateColorSpaceConverterDisplay() const +{ + CHECK_AND_RETURN_RET_LOG(initialized_ == true, {}, "Not initialized"); + + ColorSpaceConverterDisplaySet impl; + for (const auto &extension : FindColorSpaceConverterDisplayExtension()) { + auto temp = extension->creator(); + CHECK_AND_RETURN_RET_LOG(temp != nullptr, {}, "Create failed, extension: %{public}s", + extension->info.name.c_str()); + impl.emplace(std::move(temp)); + } + return impl; +} + +std::shared_ptr ExtensionManager::CreateMetadataGenerator(const FrameInfo &inputInfo, + Extension::ExtensionInfo &extensionInfo, MetadataGeneratorAlgoType algoType) const +{ + CHECK_AND_RETURN_RET_LOG(initialized_ == true, nullptr, "Not initialized"); + auto extension = FindMetadataGeneratorExtension(inputInfo, algoType); + CHECK_AND_RETURN_RET_LOG(extension != nullptr, nullptr, "Create failed, get an empty extension"); + auto impl = extension->creator(); + CHECK_AND_RETURN_RET_LOG(impl != nullptr, nullptr, + "Call extension creator failed, return a empty impl, extension: %{public}s", extension->info.name.c_str()); + extensionInfo = extension->info; + return impl; +} + +std::shared_ptr ExtensionManager::CreateAihdrEnhancer(const FrameInfo &inputInfo, + Extension::ExtensionInfo &extensionInfo) const +{ + CHECK_AND_RETURN_RET_LOG(initialized_ == true, nullptr, "Not initialized"); + auto extension = FindAihdrEnhancerExtension(inputInfo); + CHECK_AND_RETURN_RET_LOG(extension != nullptr, nullptr, "Create failed, get an empty extension"); + auto impl = extension->creator(); + CHECK_AND_RETURN_RET_LOG(impl != nullptr, nullptr, + "Call extension creator failed, return a empty impl, extension: %{public}s", extension->info.name.c_str()); + extensionInfo = extension->info; + return impl; +} + +int32_t ExtensionManager::NewInstanceId(const ExtensionManager::InstanceVariableType& instance) +{ + std::lock_guard lock(instanceManagementMtx_); + int32_t newId = currentId_; + do { + currentId_ = (currentId_ == MAX_INSTANCE_NUM) ? 0 : currentId_; + if (instanceList_[currentId_] == std::nullopt) { + instanceList_[currentId_] = instance; + newId = currentId_; + ++currentId_; + return newId; + } + ++currentId_; + } while (currentId_ != newId); + + return -1; +} + +int32_t ExtensionManager::RemoveInstanceReference(int32_t& id) +{ + std::lock_guard lock(instanceManagementMtx_); + CHECK_AND_RETURN_RET_LOG(id >= 0 && id < MAX_INSTANCE_NUM, VPE_ALGO_ERR_INVALID_VAL, "invalid instance id"); + instanceList_[id].reset() ; + id = -1; + + return VPE_ALGO_ERR_OK; +} + +std::optional ExtensionManager::GetInstance(int32_t id) +{ + std::lock_guard lock(instanceManagementMtx_); + CHECK_AND_RETURN_RET_LOG(id >= 0 && id < MAX_INSTANCE_NUM, std::nullopt, "invalid instance id"); + return instanceList_[id]; +} + +std::shared_ptr ExtensionManager::CreateDetailEnhancer(uint32_t level) const +{ + CHECK_AND_RETURN_RET_LOG(initialized_ == true, nullptr, "Not initialized"); + auto extension = FindDetailEnhancerExtension(level); + CHECK_AND_RETURN_RET_LOG(extension != nullptr, nullptr, + "Create failed, get an empty extension. 
level: %{public}d", level); + auto impl = extension->creator(); + CHECK_AND_RETURN_RET_LOG(impl != nullptr, nullptr, + "Call extension creator failed, return a empty impl, extension: %{public}s, level: %{public}d", + extension->info.name.c_str(), level); + return impl; +} + +ExtensionList ExtensionManager::LoadExtensions() const +{ + ExtensionList extensionList {}; + VPEAlgoErrCode ret = LoadStaticExtensions(extensionList); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, {}, "Load extension failed"); + LoadDynamicExtensions(extensionList); + return extensionList; +} + +ExtensionList ExtensionManager::LoadStaticImageExtensions( + const std::unordered_map staticListRegisterMap) const +{ + ExtensionList extensionList {}; + for (auto ® : staticListRegisterMap) { + CHECK_AND_RETURN_RET_LOG(reg.second != nullptr, {}, + "Get an empty register, extension: %{public}s", reg.first.c_str()); + VPE_LOGD("Load extension set: %{public}s", reg.first.c_str()); + reg.second(reinterpret_cast(&extensionList)); + } + return extensionList; +} + +bool ExtensionManager::FindImageConverterExtension( + const FrameInfo &inputInfo, const FrameInfo &outputInfo) const +{ + auto extensionList = LoadExtensions(); + CHECK_AND_RETURN_RET_LOG(!extensionList.empty(), false, "No extension found"); + auto colorSpaceConverterCapabilityMap = BuildCaps(extensionList); + CHECK_AND_RETURN_RET_LOG(!colorSpaceConverterCapabilityMap.empty(), false, "No extension available"); + auto key = + std::make_tuple(inputInfo.colorSpace, inputInfo.pixelFormat, outputInfo.colorSpace, outputInfo.pixelFormat); + VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(inputInfo.colorSpace.colorSpaceInfo, inputInfo.pixelFormat); + VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(outputInfo.colorSpace.colorSpaceInfo, outputInfo.pixelFormat); + const auto iter = colorSpaceConverterCapabilityMap.find(key); + CHECK_AND_RETURN_RET_LOG(iter != colorSpaceConverterCapabilityMap.cend() && !iter->second.empty(), false, + "CSC Extension is not found"); + return true; +} + +bool ExtensionManager::FindImageComposeExtension( + const FrameInfo &inputInfo, const FrameInfo &outputInfo) const +{ + auto extensionList = LoadDynamicComposeExtensions(); + CHECK_AND_RETURN_RET_LOG(!extensionList.empty(), false, "No extension found"); + auto colorSpaceConverterCapabilityMap = BuildCaps(extensionList); + CHECK_AND_RETURN_RET_LOG(!colorSpaceConverterCapabilityMap.empty(), false, "No extension available"); + auto key = + std::make_tuple(inputInfo.colorSpace, inputInfo.pixelFormat, outputInfo.colorSpace, outputInfo.pixelFormat); + VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(inputInfo.colorSpace.colorSpaceInfo, inputInfo.pixelFormat); + VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(outputInfo.colorSpace.colorSpaceInfo, outputInfo.pixelFormat); + const auto iter = colorSpaceConverterCapabilityMap.find(key); + CHECK_AND_RETURN_RET_LOG(iter != colorSpaceConverterCapabilityMap.cend() && !iter->second.empty(), false, + "CSC Extension is not found"); + return true; +} + +bool ExtensionManager::FindImageDecomposeExtension( + const FrameInfo &inputInfo, const FrameInfo &outputInfo) const +{ + auto extensionList = LoadDynamicDecomposeExtensions(); + CHECK_AND_RETURN_RET_LOG(!extensionList.empty(), false, "No extension found"); + auto colorSpaceConverterCapabilityMap = BuildCaps(extensionList); + CHECK_AND_RETURN_RET_LOG(!colorSpaceConverterCapabilityMap.empty(), false, "No extension available"); + auto key = + std::make_tuple(inputInfo.colorSpace, inputInfo.pixelFormat, outputInfo.colorSpace, outputInfo.pixelFormat); + 
VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(inputInfo.colorSpace.colorSpaceInfo, inputInfo.pixelFormat); + VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(outputInfo.colorSpace.colorSpaceInfo, outputInfo.pixelFormat); + const auto iter = colorSpaceConverterCapabilityMap.find(key); + CHECK_AND_RETURN_RET_LOG(iter != colorSpaceConverterCapabilityMap.cend() && !iter->second.empty(), false, + "CSC Extension is not found"); + return true; +} + +bool ExtensionManager::FindImageMetadataGenExtension(const FrameInfo &inputInfo) const +{ + auto extensionList = LoadDynamicMetadataGenExtensions(); + CHECK_AND_RETURN_RET_LOG(!extensionList.empty(), false, "No extension found"); + auto metadataGeneratorCapabilityMap = BuildCaps(extensionList); + CHECK_AND_RETURN_RET_LOG(!metadataGeneratorCapabilityMap.empty(), false, "No extension available"); + auto key = std::make_tuple(inputInfo.colorSpace, inputInfo.pixelFormat, + MetadataGeneratorAlgoType::META_GEN_ALGO_TYPE_IMAGE); + VPE_LOG_PRINT_METADATA_GEN_CAPBILITY(inputInfo.colorSpace.colorSpaceInfo, inputInfo.pixelFormat, + MetadataGeneratorAlgoType::META_GEN_ALGO_TYPE_IMAGE); + const auto iter = metadataGeneratorCapabilityMap.find(key); + CHECK_AND_RETURN_RET_LOG(iter != metadataGeneratorCapabilityMap.cend() && !iter->second.empty(), false, + "CSC metadata generator extension is not found"); + return true; +} + +bool ImageProcessing_IsColorSpaceConversionSupported(const ColorSpaceInfo inputInfo, const ColorSpaceInfo outputInfo) +{ + FrameInfo inputFrameInfo; + inputFrameInfo.colorSpace = {GetColorSpaceInfo(inputInfo.colorSpace), inputInfo.metadataType}; + inputFrameInfo.pixelFormat = inputInfo.pixelFormat; + + FrameInfo outputFrameInfo; + outputFrameInfo.colorSpace = {GetColorSpaceInfo(outputInfo.colorSpace), outputInfo.metadataType}; + outputFrameInfo.pixelFormat = outputInfo.pixelFormat; + ExtensionManager::GetInstance().IncreaseInstance(); + auto flag = ExtensionManager::GetInstance().FindImageConverterExtension(inputFrameInfo, outputFrameInfo); + ExtensionManager::GetInstance().DecreaseInstance(); + return flag; +} + +bool ImageProcessing_IsCompositionSupported(const ColorSpaceInfo inputInfo, const ColorSpaceInfo outputInfo) +{ + FrameInfo inputFrameInfo; + inputFrameInfo.colorSpace = {GetColorSpaceInfo(inputInfo.colorSpace), inputInfo.metadataType}; + inputFrameInfo.pixelFormat = inputInfo.pixelFormat; + + FrameInfo outputFrameInfo; + outputFrameInfo.colorSpace = {GetColorSpaceInfo(outputInfo.colorSpace), outputInfo.metadataType}; + outputFrameInfo.pixelFormat = outputInfo.pixelFormat; + ExtensionManager::GetInstance().IncreaseInstance(); + auto flag = ExtensionManager::GetInstance().FindImageComposeExtension(inputFrameInfo, outputFrameInfo); + ExtensionManager::GetInstance().DecreaseInstance(); + return flag; +} + +bool ImageProcessing_IsDecompositionSupported(const ColorSpaceInfo inputInfo, const ColorSpaceInfo outputInfo) +{ + FrameInfo inputFrameInfo; + inputFrameInfo.colorSpace = {GetColorSpaceInfo(inputInfo.colorSpace), inputInfo.metadataType}; + inputFrameInfo.pixelFormat = inputInfo.pixelFormat; + + FrameInfo outputFrameInfo; + outputFrameInfo.colorSpace = {GetColorSpaceInfo(outputInfo.colorSpace), outputInfo.metadataType}; + outputFrameInfo.pixelFormat = outputInfo.pixelFormat; + ExtensionManager::GetInstance().IncreaseInstance(); + auto flag = ExtensionManager::GetInstance().FindImageDecomposeExtension(inputFrameInfo, outputFrameInfo); + ExtensionManager::GetInstance().DecreaseInstance(); + return flag; +} + +bool 
ImageProcessing_IsMetadataGenerationSupported(const ColorSpaceInfo inputInfo) +{ + FrameInfo inputFrameInfo; + inputFrameInfo.colorSpace = {GetColorSpaceInfo(inputInfo.colorSpace), inputInfo.metadataType}; + inputFrameInfo.pixelFormat = inputInfo.pixelFormat; + ExtensionManager::GetInstance().IncreaseInstance(); + auto flag = ExtensionManager::GetInstance().FindImageMetadataGenExtension(inputFrameInfo); + ExtensionManager::GetInstance().DecreaseInstance(); + return flag; +} + +VPEAlgoErrCode ExtensionManager::LoadStaticExtensions(ExtensionList& extensionList) const +{ + for (auto ® : staticExtensionsRegisterMap) { + CHECK_AND_RETURN_RET_LOG(reg.second != nullptr, VPE_ALGO_ERR_UNKNOWN, + "Get an empty register, extension: %{public}s", reg.first.c_str()); + VPE_LOGD("Load extension set: %{public}s", reg.first.c_str()); + reg.second(reinterpret_cast(&extensionList)); + } + return VPE_ALGO_ERR_OK; +} + +void ExtensionManager::LoadDynamicExtensions(ExtensionList& extensionList) const +{ + CHECK_AND_RETURN_LOG(g_algoHandle != nullptr, "dlopen ext fail!"); + GetRegisterExtensionFuncs = reinterpret_cast + (dlsym(g_algoHandle, "GetRegisterExtensionFuncs")); + CHECK_AND_RETURN_LOG(GetRegisterExtensionFuncs != nullptr, "dlsym GetRegisterExtensionFuncs fail!"); + auto dynamicExtensionsRegisterMapPtr = GetRegisterExtensionFuncs(); + CHECK_AND_RETURN_LOG(dynamicExtensionsRegisterMapPtr != nullptr, "get dynamicExtensionsRegisterMap fail!!"); + auto dynamicExtensionsRegisterMap = *dynamicExtensionsRegisterMapPtr; + for (auto ® : dynamicExtensionsRegisterMap) { + if (reg.second != nullptr) { + VPE_LOGD("Load extension set: %{public}s", reg.first.c_str()); + reg.second(reinterpret_cast(&extensionList)); + } + } +} + +ExtensionList ExtensionManager::LoadDynamicMetadataGenExtensions() const +{ + ExtensionList extensionList {}; + CHECK_AND_RETURN_RET_LOG(g_algoHandle != nullptr, {}, "dlopen ext fail!"); + + GetRegisterMetdataGenExtensionFuncs = reinterpret_cast + (dlsym(g_algoHandle, "GetRegisterMetdataGenExtensionFuncs")); + CHECK_AND_RETURN_RET_LOG(GetRegisterMetdataGenExtensionFuncs != nullptr, {}, + "dlsym GetRegisterMetdataGenExtensionFuncs fail!"); + + auto dynamicExtensionsRegisterMapPtr = GetRegisterMetdataGenExtensionFuncs(); + CHECK_AND_RETURN_RET_LOG(dynamicExtensionsRegisterMapPtr != nullptr, {}, + "get dynamicMetadataGenExtensions fail!!"); + auto dynamicExtensionsRegisterMap = *dynamicExtensionsRegisterMapPtr; + for (auto ® : dynamicExtensionsRegisterMap) { + if (reg.second != nullptr) { + VPE_LOGD("Load extension set: %{public}s", reg.first.c_str()); + reg.second(reinterpret_cast(&extensionList)); + } + } + return extensionList; +} + +ExtensionList ExtensionManager::LoadDynamicComposeExtensions() const +{ + ExtensionList extensionList {}; + CHECK_AND_RETURN_RET_LOG(g_algoHandle != nullptr, {}, "dlopen ext fail!"); + + GetRegisterComposeExtensionFuncs = reinterpret_cast + (dlsym(g_algoHandle, "GetRegisterComposeExtensionFuncs")); + CHECK_AND_RETURN_RET_LOG(GetRegisterComposeExtensionFuncs != nullptr, {}, + "dlsym GetRegisterComposeExtensionFuncs fail!"); + + auto dynamicExtensionsRegisterMapPtr = GetRegisterComposeExtensionFuncs(); + CHECK_AND_RETURN_RET_LOG(dynamicExtensionsRegisterMapPtr != nullptr, {}, + "get dynamicMetadataGenExtensions fail!!"); + auto dynamicExtensionsRegisterMap = *dynamicExtensionsRegisterMapPtr; + for (auto ® : dynamicExtensionsRegisterMap) { + if (reg.second != nullptr) { + VPE_LOGD("Load extension set: %{public}s", reg.first.c_str()); + 
reg.second(reinterpret_cast(&extensionList)); + } + } + return extensionList; +} + +ExtensionList ExtensionManager::LoadDynamicDecomposeExtensions() const +{ + ExtensionList extensionList {}; + CHECK_AND_RETURN_RET_LOG(g_algoHandle != nullptr, {}, "dlopen ext fail, g_algoHandle null!"); + + GetRegisterDecomposeExtensionFuncs = reinterpret_cast + (dlsym(g_algoHandle, "GetRegisterDecomposeExtensionFuncs")); + CHECK_AND_RETURN_RET_LOG(GetRegisterDecomposeExtensionFuncs != nullptr, {}, + "dlsym GetRegisterDecomposeExtensionFuncs fail!"); + + auto dynamicExtensionsRegisterMapPtr = GetRegisterDecomposeExtensionFuncs(); + CHECK_AND_RETURN_RET_LOG(dynamicExtensionsRegisterMapPtr != nullptr, {}, + "get dynamicMetadataGenExtensions fail!!"); + auto dynamicExtensionsRegisterMap = *dynamicExtensionsRegisterMapPtr; + for (auto ® : dynamicExtensionsRegisterMap) { + if (reg.second != nullptr) { + VPE_LOGD("Load extension set: %{public}s", reg.first.c_str()); + reg.second(reinterpret_cast(&extensionList)); + } + } + return extensionList; +} + +template +T ExtensionManager::BuildCaps(const ExtensionList& extensionList) const +{ + VPEAlgoErrCode err = VPE_ALGO_ERR_OK; + T capMap {}; + for (size_t idx = 0; idx < extensionList.size(); ++idx) { + auto extension = extensionList[idx]; + CHECK_AND_RETURN_RET_LOG(extension != nullptr, {}, "Extension is nullptr"); + if constexpr (std::is_same_v) { + if (extension->info.type == ExtensionType::COLORSPACE_CONVERTER) { + err = BuildColorSpaceConverterCaps(extension, idx, capMap); + CHECK_AND_LOG(err == VPE_ALGO_ERR_OK, "Build caps failed, extension: %{public}s", + extension->info.name.c_str()); + } + } else if constexpr (std::is_same_v) { + if (extension->info.type == ExtensionType::COLORSPACE_CONVERTER_DISPLAY) { + err = BuildColorSpaceConverterDisplayCaps(extension, idx, capMap); + CHECK_AND_LOG(err == VPE_ALGO_ERR_OK, "Build caps failed, extension: %{public}s", + extension->info.name.c_str()); + } + } else if constexpr (std::is_same_v) { + if (extension->info.type == ExtensionType::METADATA_GENERATOR) { + err = BuildMetadataGeneratorCaps(extension, idx, capMap); + CHECK_AND_LOG(err == VPE_ALGO_ERR_OK, "Build caps failed, extension: %{public}s", + extension->info.name.c_str()); + } + } else if constexpr (std::is_same_v) { + if (extension->info.type == ExtensionType::DETAIL_ENHANCER) { + err = BuildDetailEnhancerCaps(extension, idx, capMap); + CHECK_AND_LOG(err == VPE_ALGO_ERR_OK, "Build caps failed, extension: %{public}s", + extension->info.name.c_str()); + } + } else if constexpr (std::is_same_v) { + if (extension->info.type == ExtensionType::AIHDR_ENHANCER) { + err = BuildAihdrEnhancerCaps(extension, idx, capMap); + CHECK_AND_LOG(err == VPE_ALGO_ERR_OK, "Build caps failed, extension: %{public}s", + extension->info.name.c_str()); + } + } else { + VPE_LOGE("Unknown extension type"); + return {}; + } + } + + return capMap; +} + +std::shared_ptr ExtensionManager::FindColorSpaceConverterExtension( + const FrameInfo &inputInfo, const FrameInfo &outputInfo) const +{ + VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(inputInfo.colorSpace.colorSpaceInfo, inputInfo.pixelFormat); + VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(outputInfo.colorSpace.colorSpaceInfo, outputInfo.pixelFormat); + auto extensionList = LoadExtensions(); + CHECK_AND_RETURN_RET_LOG(!extensionList.empty(), nullptr, "No extension found"); + auto colorSpaceConverterCapabilityMap = BuildCaps(extensionList); + CHECK_AND_RETURN_RET_LOG(!colorSpaceConverterCapabilityMap.empty(), nullptr, "No extension available"); + auto key = + 
std::make_tuple(inputInfo.colorSpace, inputInfo.pixelFormat, outputInfo.colorSpace, outputInfo.pixelFormat); + const auto iter = colorSpaceConverterCapabilityMap.find(key); + if (iter == colorSpaceConverterCapabilityMap.cend() || iter->second.empty()) { + VPE_LOGE("CSC Extension is not found"); + CM_ColorSpaceInfo CSDesc = inputInfo.colorSpace.colorSpaceInfo; + VPE_LOGE("input: (primary=%{public}3d,trans=%{public}3d,mat=%{public}3d,range=%{public}3d,"\ + "pixel=%{public}3d,metadata=%{public}3d", (CSDesc).primaries, (CSDesc).transfunc, (CSDesc).matrix, + (CSDesc).range, inputInfo.pixelFormat, inputInfo.colorSpace.metadataType); + CSDesc = outputInfo.colorSpace.colorSpaceInfo; + VPE_LOGE("output: (primary=%{public}3d,trans=%{public}3d,mat=%{public}3d,range=%{public}3d,"\ + "pixel=%{public}3d,metadata=%{public}3d", (CSDesc).primaries, (CSDesc).transfunc, (CSDesc).matrix, + (CSDesc).range, outputInfo.pixelFormat, outputInfo.colorSpace.metadataType); + return nullptr; + } + size_t idx = std::get<2>(*(iter->second.cbegin())); + return std::static_pointer_cast(extensionList[idx]); +} + +ColorSpaceConverterDisplayExtensionSet ExtensionManager::FindColorSpaceConverterDisplayExtension() const +{ + auto extensionList = LoadExtensions(); + CHECK_AND_RETURN_RET_LOG(!extensionList.empty(), {}, "No extension found"); + ColorSpaceConverterDisplayExtensionSet extensions {}; + for (const auto &extension : extensionList) { + CHECK_AND_CONTINUE_LOG(extension != nullptr, "Get an empty extension"); + if (extension->info.type != ExtensionType::COLORSPACE_CONVERTER_DISPLAY) { + continue; + } + extensions.emplace(std::static_pointer_cast(extension)); + } + return extensions; +} + +std::shared_ptr ExtensionManager::FindMetadataGeneratorExtension(const FrameInfo &inputInfo, + MetadataGeneratorAlgoType algoType) const +{ + auto extensionList = LoadExtensions(); + CHECK_AND_RETURN_RET_LOG(!extensionList.empty(), nullptr, "No extension found"); + auto metadataGeneratorCapabilityMap = BuildCaps(extensionList); + CHECK_AND_RETURN_RET_LOG(!metadataGeneratorCapabilityMap.empty(), nullptr, "No extension available"); + auto key = std::make_tuple(inputInfo.colorSpace, inputInfo.pixelFormat, algoType); + VPE_LOG_PRINT_METADATA_GEN_CAPBILITY(inputInfo.colorSpace.colorSpaceInfo, inputInfo.pixelFormat, algoType); + const auto iter = metadataGeneratorCapabilityMap.find(key); + CHECK_AND_RETURN_RET_LOG(iter != metadataGeneratorCapabilityMap.cend() && !iter->second.empty(), nullptr, + "CSC metadata generator extension is not found"); + size_t idx = std::get<2>(*(iter->second.cbegin())); + return std::static_pointer_cast(extensionList[idx]); +} + +VPEAlgoErrCode ExtensionManager::BuildColorSpaceConverterCaps(const std::shared_ptr &ext, size_t idx, + ColorSpaceConverterCapabilityMap& colorSpaceConverterCapabilityMap) const +{ + VPEAlgoErrCode err = VPE_ALGO_ERR_OK; + auto realExtension = std::static_pointer_cast(ext); + auto capabilities = realExtension->capabilitiesBuilder(); + for (const auto &cap : capabilities) { + err = ExtractColorSpaceConverterCap(cap, idx, colorSpaceConverterCapabilityMap); + } + return err; +} + +std::shared_ptr ExtensionManager::FindDetailEnhancerExtension(uint32_t level) const +{ + auto extensionList = LoadExtensions(); + CHECK_AND_RETURN_RET_LOG(!extensionList.empty(), nullptr, "No extension found"); + auto detailEnhancerCapabilityMap = BuildCaps(extensionList); + CHECK_AND_RETURN_RET_LOG(!detailEnhancerCapabilityMap.empty(), nullptr, "No extension available"); + const auto iter = 
detailEnhancerCapabilityMap.find(level); + CHECK_AND_RETURN_RET_LOG(iter != detailEnhancerCapabilityMap.cend(), nullptr, + "Detail enhancer Extension is not found"); + size_t idx = iter->second; + return std::static_pointer_cast(extensionList[idx]); +} + +std::shared_ptr ExtensionManager::FindAihdrEnhancerExtension(const FrameInfo &inputInfo) const +{ + auto extensionList = LoadExtensions(); + CHECK_AND_RETURN_RET_LOG(!extensionList.empty(), nullptr, "No extension found"); + auto aihdrEnhancerCapabilityMap = BuildCaps(extensionList); + CHECK_AND_RETURN_RET_LOG(!aihdrEnhancerCapabilityMap.empty(), nullptr, "No extension available"); + auto key = std::make_tuple(inputInfo.colorSpace, inputInfo.pixelFormat); + const auto iter = aihdrEnhancerCapabilityMap.find(key); + CHECK_AND_RETURN_RET_LOG(iter != aihdrEnhancerCapabilityMap.cend() && !iter->second.empty(), nullptr, + "Aihdr enhancer extension is not found"); + size_t idx = std::get<2>(*(iter->second.cbegin())); + return std::static_pointer_cast(extensionList[idx]); +} + +VPEAlgoErrCode ExtensionManager::ExtractColorSpaceConverterCap(const ColorSpaceConverterCapability& cap, size_t idx, + ColorSpaceConverterCapabilityMap& colorSpaceConverterCapabilityMap) const +{ + auto inputColorSpaceDesc = cap.inputColorSpaceDesc; + auto outputColorSpaceDesc = cap.outputColorSpaceDesc; + uint32_t rank = cap.rank; + int32_t version = cap.version; + for (const auto &[inputPixelFormat, outputPixelFormats] : cap.pixelFormatMap) { + for (const auto &outputPixelFormat : outputPixelFormats) { + auto key = std::make_tuple(inputColorSpaceDesc, inputPixelFormat, outputColorSpaceDesc, outputPixelFormat); + VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(inputColorSpaceDesc.colorSpaceInfo, inputPixelFormat); + VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(outputColorSpaceDesc.colorSpaceInfo, outputPixelFormat); + auto value = std::make_tuple(rank, version, idx); + colorSpaceConverterCapabilityMap[key].push_back(value); + } + } + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ExtensionManager::BuildColorSpaceConverterDisplayCaps(const std::shared_ptr &ext, + size_t idx, ColorSpaceConverterDisplayCapabilityMap& colorSpaceConverterDisplayCapabilityMap) const +{ + (void)ext; + (void)idx; + (void)colorSpaceConverterDisplayCapabilityMap; + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ExtensionManager::ExtractMetadataGeneratorCap(const MetadataGeneratorCapability &cap, size_t idx, + MetadataGeneratorAlgoType algoType, MetadataGeneratorCapabilityMap& metadataGeneratorCapabilityMap) const +{ + auto colorSpaceDesc = cap.colorspaceDesc; + uint32_t rank = cap.rank; + int32_t version = cap.version; + for (const auto &pixelFormat : cap.pixelFormats) { + auto key = std::make_tuple(colorSpaceDesc, pixelFormat, algoType); + VPE_LOG_PRINT_METADATA_GEN_CAPBILITY(colorSpaceDesc.colorSpaceInfo, pixelFormat, algoType); + auto value = std::make_tuple(rank, version, idx); + metadataGeneratorCapabilityMap[key].push_back(value); + } + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ExtensionManager::BuildMetadataGeneratorCaps(const std::shared_ptr &ext, size_t idx, + MetadataGeneratorCapabilityMap& metadataGeneratorCapabilityMap) const +{ + VPEAlgoErrCode err = VPE_ALGO_ERR_OK; + auto realExtension = std::static_pointer_cast(ext); + auto capabilities = realExtension->capabilitiesBuilder(); + MetadataGeneratorAlgoType algoType = MetadataGeneratorAlgoType::META_GEN_ALGO_TYPE_IMAGE; + if (ext->info.name == "VideoMetadataGen") { + algoType = MetadataGeneratorAlgoType::META_GEN_ALGO_TYPE_VIDEO; + } + for (const auto &cap : 
capabilities) { + err = ExtractMetadataGeneratorCap(cap, idx, algoType, metadataGeneratorCapabilityMap); + } + return err; +} + +VPEAlgoErrCode ExtensionManager::BuildDetailEnhancerCaps(const std::shared_ptr& ext, size_t idx, + DetailEnhancerCapabilityMap& detailEnhancerCapabilityMap) const +{ + auto realExtension = std::static_pointer_cast(ext); + auto capabilities = realExtension->capabilitiesBuilder(); + for (const auto &level : capabilities.levels) { + detailEnhancerCapabilityMap.emplace(level, idx); + } + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ExtensionManager::ExtractAihdrEnhancerCap(const AihdrEnhancerCapability &cap, size_t idx, + AihdrEnhancerCapabilityMap& aihdrEnhancerCapabilityMap) const +{ + auto colorSpaceDesc = cap.colorspaceDesc; + uint32_t rank = cap.rank; + int32_t version = cap.version; + for (const auto &pixelFormat : cap.pixelFormats) { + auto key = std::make_tuple(colorSpaceDesc, pixelFormat); + auto value = std::make_tuple(rank, version, idx); + aihdrEnhancerCapabilityMap[key].push_back(value); + } + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode ExtensionManager::BuildAihdrEnhancerCaps(const std::shared_ptr &ext, size_t idx, + AihdrEnhancerCapabilityMap& aihdrEnhancerCapabilityMap) const +{ + VPEAlgoErrCode err = VPE_ALGO_ERR_OK; + auto realExtension = std::static_pointer_cast(ext); + auto capabilities = realExtension->capabilitiesBuilder(); + for (const auto &cap : capabilities) { + err = ExtractAihdrEnhancerCap(cap, idx, aihdrEnhancerCapabilityMap); + } + return err; +} +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/extension_manager/include/aihdr_enhancer_extension.h b/framework/algorithm/extension_manager/include/aihdr_enhancer_extension.h new file mode 100644 index 0000000000000000000000000000000000000000..909e8803038d9b2e90dc2bb465d0a2f1d852c7ee --- /dev/null +++ b/framework/algorithm/extension_manager/include/aihdr_enhancer_extension.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
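LoadDynamicExtensions() and the compose/decompose/metadata variants above all follow one pattern: dlopen libvideoprocessingengine_ext.z.so, dlsym a well-known factory symbol that returns a map from set name to register function, and call every register function with the address of the ExtensionList being filled. The sketch below shows what the exporting side of such a library could look like. The RegisterExtensionFunc signature lives in static_extension_list.h, which is not in this patch, so the uintptr_t-based signature, the push_back on ExtensionList, and the field order of DetailEnhancerCapability are all assumptions.

// Sketch only: a hypothetical extension library exporting GetRegisterExtensionFuncs.
#include <memory>
#include <string>
#include <unordered_map>

#include "detail_enhancer_extension.h"
#include "static_extension_list.h"

namespace {
using namespace OHOS::Media::VideoProcessingEngine;

// Assumed to be callable as func(reinterpret_cast<uintptr_t>(&extensionList)).
void RegisterMyDetailEnhancer(uintptr_t extensionListAddr)
{
    auto* list = reinterpret_cast<Extension::ExtensionList*>(extensionListAddr);
    auto ext = std::make_shared<Extension::DetailEnhancerExtension>();
    ext->info = { Extension::ExtensionType::DETAIL_ENHANCER, "MyDetailEnhancer", "1.0" };
    ext->creator = [] () -> std::shared_ptr<DetailEnhancerBase> {
        return nullptr; // a real library returns its DetailEnhancerBase implementation here
    };
    ext->capabilitiesBuilder = [] {
        // levels / rank / version, matching the DetailEnhancerCapability struct above.
        return DetailEnhancerCapability { { DETAIL_ENH_LEVEL_HIGH_AISR }, 1, 1 };
    };
    list->push_back(ext);
}

std::unordered_map<std::string, Extension::RegisterExtensionFunc> g_registers = {
    { "MyDetailEnhancerSet", RegisterMyDetailEnhancer },
};
} // namespace

// Resolved by ExtensionManager via dlsym("GetRegisterExtensionFuncs").
extern "C" std::unordered_map<std::string, Extension::RegisterExtensionFunc>* GetRegisterExtensionFuncs()
{
    return &g_registers;
}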
+ */ + +#ifndef FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_AIHDR_ENHANCER_EXTENSION_H +#define FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_AIHDR_ENHANCER_EXTENSION_H + +#include + +#include "aihdr_enhancer_base.h" +#include "aihdr_enhancer_capability.h" +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { +struct AihdrEnhancerExtension : public ExtensionBase { + AihdrEnhancerCreator creator; + AihdrEnhancerCapabilitiesBuilder capabilitiesBuilder; +}; +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_AIHDR_ENHANCER_EXTENSION_H diff --git a/framework/algorithm/extension_manager/include/colorspace_converter_display_extension.h b/framework/algorithm/extension_manager/include/colorspace_converter_display_extension.h new file mode 100644 index 0000000000000000000000000000000000000000..521e6942a1a2d3c7b262093c2b92aecc82230d34 --- /dev/null +++ b/framework/algorithm/extension_manager/include/colorspace_converter_display_extension.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_COLORSPACE_CONVERTER_DISPLAY_EXTENSION_H +#define FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_COLORSPACE_CONVERTER_DISPLAY_EXTENSION_H + +#include +#include "colorspace_converter_display_base.h" +#include "colorspace_converter_display_capability.h" +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { +struct ColorSpaceConverterDisplayExtension : public ExtensionBase { + ColorSpaceConverterDisplayCreator creator; + ColorSpaceConverterDisplayCapabilitiesBuilder capabilitiesBuilder; +}; +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_COLORSPACE_CONVERTER_DISPLAY_EXTENSION_H diff --git a/framework/algorithm/extension_manager/include/colorspace_converter_extension.h b/framework/algorithm/extension_manager/include/colorspace_converter_extension.h new file mode 100644 index 0000000000000000000000000000000000000000..f3feaf895e3461e4e2996894e1d838f5d325d3d5 --- /dev/null +++ b/framework/algorithm/extension_manager/include/colorspace_converter_extension.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_COLORSPACE_CONVERTER_EXTENSION_H +#define FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_COLORSPACE_CONVERTER_EXTENSION_H + +#include +#include "colorspace_converter_base.h" +#include "colorspace_converter_capability.h" +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { +struct ColorSpaceConverterExtension : public ExtensionBase { + ColorSpaceConverterCreator creator; + ColorSpaceConverterCapabilitiesBuilder capabilitiesBuilder; +}; +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_COLORSPACE_CONVERTER_EXTENSION_H diff --git a/framework/algorithm/extension_manager/include/detail_enhancer_extension.h b/framework/algorithm/extension_manager/include/detail_enhancer_extension.h new file mode 100644 index 0000000000000000000000000000000000000000..498ee8723ac9912d7c93608ceb055acc06b4c7bb --- /dev/null +++ b/framework/algorithm/extension_manager/include/detail_enhancer_extension.h @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_DETAIL_ENHANCER_EXTENSION_H +#define FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_DETAIL_ENHANCER_EXTENSION_H + +#include + +#include "detail_enhancer_base.h" +#include "detail_enhancer_capability.h" +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { + +struct DetailEnhancerExtension : public ExtensionBase { + DetailEnhancerCreator creator; + DetailEnhancerCapabilitiesBuilder capabilitiesBuilder; +}; + +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_DETAIL_ENHANCER_EXTENSION_H \ No newline at end of file diff --git a/framework/algorithm/extension_manager/include/extension_base.h b/framework/algorithm/extension_manager/include/extension_base.h new file mode 100644 index 0000000000000000000000000000000000000000..0bf31e56feb29c59b048eda8914dfde86a07ab15 --- /dev/null +++ b/framework/algorithm/extension_manager/include/extension_base.h @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRMEWORK_ALGORITHM_EXTENSION_MANAGER_EXTENSION_BASE_H +#define FRMEWORK_ALGORITHM_EXTENSION_MANAGER_EXTENSION_BASE_H + +#include + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { +enum class ExtensionType { + COLORSPACE_CONVERTER, + COLORSPACE_CONVERTER_DISPLAY, + METADATA_GENERATOR, + DETAIL_ENHANCER, + VIDEO_REFRESHRATE_PREDICTION, + AIHDR_ENHANCER +}; + +struct ExtensionInfo { + ExtensionType type {ExtensionType::COLORSPACE_CONVERTER}; + std::string name; + std::string version; +}; + +struct ExtensionBase { + virtual ~ExtensionBase() = default; + ExtensionInfo info; +}; +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRMEWORK_ALGORITHM_EXTENSION_MANAGER_EXTENSION_BASE_H diff --git a/framework/algorithm/extension_manager/include/extension_manager.h b/framework/algorithm/extension_manager/include/extension_manager.h new file mode 100644 index 0000000000000000000000000000000000000000..840cf15d2dd987fe9422f768202bc14e9d521f32 --- /dev/null +++ b/framework/algorithm/extension_manager/include/extension_manager.h @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_EXTENSION_MANAGER_H +#define FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_EXTENSION_MANAGER_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "aihdr_enhancer.h" +#include "aihdr_enhancer_extension.h" +#include "static_extension_list.h" +#include "extension_base.h" +#include "frame_info.h" +#include "colorspace_converter.h" +#include "metadata_generator.h" +#include "detail_enhancer_image.h" +#include "video_refreshrate_prediction.h" +#include "colorspace_converter_capability.h" +#include "colorspace_converter_display_capability.h" +#include "metadata_generator_capability.h" +#include "colorspace_converter_extension.h" +#include "colorspace_converter_display_extension.h" +#include "metadata_generator_extension.h" +#include "detail_enhancer_extension.h" +#include "video_refreshrate_prediction_extension.h" +#include "utils.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { +namespace { +/* +{ + (inputColorSpaceDesc, inputPixelFormat, outputColorSpaceDesc, outputPixelFormat): [ + (rank, version, extensionListIndex), + (rank, version, extensionListIndex), + ...... + ] +}*/ +using ColorSpaceConverterCapabilityMap = + std::map< + std::tuple, + std::vector>>; +using ColorSpaceConverterDisplayCapabilityMap = ColorSpaceConverterCapabilityMap; +/* +{ + (colorSpaceDesc, pixelFormat, metadataGeneratorAlgoType): [ + (rank, version, extensionListIndex), + (rank, version, extensionListIndex), + ...... 
+ ] +} +*/ +using MetadataGeneratorCapabilityMap = + std::map< + std::tuple, + std::vector>>; +using DetailEnhancerCapabilityMap = std::map; +using AihdrEnhancerCapabilityMap = + std::map< + std::tuple, + std::vector>>; + +using ColorSpaceConverterDisplaySet = std::set>; +using ColorSpaceConverterDisplayExtensionSet = std::set>; +} + +class ExtensionManager { +public: + static ExtensionManager& GetInstance(); + + bool IsColorSpaceConversionSupported(const FrameInfo &inputInfo, const FrameInfo &outputInfo) const; + std::shared_ptr CreateColorSpaceConverter(const FrameInfo &inputInfo, + const FrameInfo &outputInfo, Extension::ExtensionInfo &extensionInfo) const; + ColorSpaceConverterDisplaySet CreateColorSpaceConverterDisplay() const; + std::shared_ptr CreateMetadataGenerator(const FrameInfo &inputInfo, + Extension::ExtensionInfo &extensionInfo, MetadataGeneratorAlgoType algoType) const; + std::shared_ptr CreateAihdrEnhancer(const FrameInfo &inputInfo, + Extension::ExtensionInfo &extensionInfo) const; + std::shared_ptr CreateDetailEnhancer(uint32_t level) const; + std::shared_ptr CreateVideoRefreshRatePredictor() const; + + using InstanceVariableType = std::variant, + std::shared_ptr, + std::shared_ptr, + std::shared_ptr>; + void IncreaseInstance(); + void DecreaseInstance(); + int32_t NewInstanceId(const InstanceVariableType& instance); + int32_t RemoveInstanceReference(int32_t& id); + std::optional GetInstance(int32_t id); + bool FindImageConverterExtension(const FrameInfo &inputInfo, const FrameInfo &outputInfo) const; + bool FindImageComposeExtension(const FrameInfo &inputInfo, const FrameInfo &outputInfo) const; + bool FindImageDecomposeExtension(const FrameInfo &inputInfo, const FrameInfo &outputInfo) const; + bool FindImageMetadataGenExtension(const FrameInfo &inputInfo) const; +private: + VPEAlgoErrCode Init(); + ExtensionManager(); + ~ExtensionManager(); + ExtensionManager(const ExtensionManager&) = delete; + ExtensionManager& operator=(const ExtensionManager&) = delete; + + std::shared_ptr FindColorSpaceConverterExtension(const FrameInfo &inputInfo, + const FrameInfo &outputInfo) const; + ColorSpaceConverterDisplayExtensionSet FindColorSpaceConverterDisplayExtension() const; + std::shared_ptr FindMetadataGeneratorExtension(const FrameInfo &inputInfo, + MetadataGeneratorAlgoType algoType) const; + std::shared_ptr FindAihdrEnhancerExtension(const FrameInfo &inputInfo) const; + std::shared_ptr FindDetailEnhancerExtension(uint32_t level) const; + ExtensionList LoadExtensions() const; + VPEAlgoErrCode LoadStaticExtensions(ExtensionList& extensionList) const; + ExtensionList LoadStaticImageExtensions( + const std::unordered_map staticListRegisterMap) const; + ExtensionList LoadDynamicMetadataGenExtensions() const; + ExtensionList LoadDynamicComposeExtensions() const; + ExtensionList LoadDynamicDecomposeExtensions() const; + void LoadDynamicExtensions(ExtensionList& extensionList) const; + template T BuildCaps(const ExtensionList& extensionList) const; + VPEAlgoErrCode BuildColorSpaceConverterCaps(const std::shared_ptr& ext, size_t idx, + ColorSpaceConverterCapabilityMap& colorSpaceConverterCapabilityMap) const; + VPEAlgoErrCode BuildColorSpaceConverterDisplayCaps(const std::shared_ptr& ext, size_t idx, + ColorSpaceConverterDisplayCapabilityMap& colorSpaceConverterDisplayCapabilityMap) const; + VPEAlgoErrCode BuildMetadataGeneratorCaps(const std::shared_ptr& ext, size_t idx, + MetadataGeneratorCapabilityMap& metadataGeneratorCapabilityMap) const; + VPEAlgoErrCode 
BuildAihdrEnhancerCaps(const std::shared_ptr& ext, size_t idx, + AihdrEnhancerCapabilityMap& aihdrEnhancerCapabilityMap) const; + VPEAlgoErrCode BuildDetailEnhancerCaps(const std::shared_ptr& ext, size_t idx, + DetailEnhancerCapabilityMap& detailEnhancerCapabilityMap) const; + VPEAlgoErrCode ExtractColorSpaceConverterCap(const ColorSpaceConverterCapability& cap, size_t idx, + ColorSpaceConverterCapabilityMap& colorSpaceConverterCapabilityMap) const; + VPEAlgoErrCode ExtractMetadataGeneratorCap(const MetadataGeneratorCapability &cap, size_t idx, + MetadataGeneratorAlgoType algoType, MetadataGeneratorCapabilityMap& metadataGeneratorCapabilityMap) const; + VPEAlgoErrCode ExtractAihdrEnhancerCap(const AihdrEnhancerCapability &cap, size_t idx, + AihdrEnhancerCapabilityMap& aihdrEnhancerCapabilityMap) const; + + std::atomic initialized_ {false}; + + static constexpr int32_t MAX_INSTANCE_NUM { 1024 }; + std::mutex instanceManagementMtx_; + std::mutex instanceCountMtx_; + int32_t currentId_ { 0 }; + int32_t usedInstance_ { 0 }; + std::array, MAX_INSTANCE_NUM> instanceList_ { std::nullopt }; +}; + extern "C" bool ImageProcessing_IsColorSpaceConversionSupported(const ColorSpaceInfo inputInfo, + const ColorSpaceInfo outputInfo); + extern "C" bool ImageProcessing_IsCompositionSupported(const ColorSpaceInfo inputInfo, + const ColorSpaceInfo outputInfo); + extern "C" bool ImageProcessing_IsDecompositionSupported(const ColorSpaceInfo inputInfo, + const ColorSpaceInfo outputInfo); + extern "C" bool ImageProcessing_IsMetadataGenerationSupported(const ColorSpaceInfo inputInfo); +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_EXTENSION_MANAGER_H diff --git a/framework/algorithm/extension_manager/include/metadata_generator_extension.h b/framework/algorithm/extension_manager/include/metadata_generator_extension.h new file mode 100644 index 0000000000000000000000000000000000000000..311ffe489003cc5876b27c473d90c9e6b4570871 --- /dev/null +++ b/framework/algorithm/extension_manager/include/metadata_generator_extension.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_METADATA_GENERATOR_EXTENSION_H +#define FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_METADATA_GENERATOR_EXTENSION_H + +#include +#include "metadata_generator_base.h" +#include "metadata_generator_capability.h" +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { +struct MetadataGeneratorExtension : public ExtensionBase { + MetadataGeneratorCreator creator; + MetadataGeneratorCapabilitiesBuilder capabilitiesBuilder; +}; +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_METADATA_GENERATOR_EXTENSION_H diff --git a/framework/algorithm/extension_manager/include/static_extension_list.h b/framework/algorithm/extension_manager/include/static_extension_list.h new file mode 100644 index 0000000000000000000000000000000000000000..5af2ea4f8ff0a512fc34b827d2f84be194624a11 --- /dev/null +++ b/framework/algorithm/extension_manager/include/static_extension_list.h @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VPE_FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_STATIC_EXTENSION_LIST_H +#define VPE_FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_STATIC_EXTENSION_LIST_H + +namespace OHOS::Media::VideoProcessingEngine::Extension { +using RegisterExtensionFunc = void (*)(uintptr_t extensionListAddr); +const std::unordered_map staticExtensionsRegisterMap = { +}; +} // namespace OHOS::Media::VideoProcessingEngine::Extension + +#endif // VPE_FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_STATIC_EXTENSION_LIST_H diff --git a/framework/algorithm/extension_manager/include/utils.h b/framework/algorithm/extension_manager/include/utils.h new file mode 100644 index 0000000000000000000000000000000000000000..2f8ed4edeec9555a1c2fb9f70c37d974ea0401ee --- /dev/null +++ b/framework/algorithm/extension_manager/include/utils.h @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_UTILS_H +#define FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_UTILS_H + +#include +#include +#include +#include +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { +namespace Utils { +using RegisterExtensionFunc = std::function>()>; +} // namespace Utils + +using ExtensionList = std::vector>; + +void DoRegisterExtensions(uintptr_t addr, Utils::RegisterExtensionFunc func); +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#define EXTENSION_EXPORT extern "C" __attribute__((visibility("default"))) + +#define REGISTER_EXTENSIONS(libName, registerFunc) \ + EXTENSION_EXPORT void Register##libName##Extensions(uintptr_t extensionListAddr) \ + { \ + OHOS::Media::VideoProcessingEngine::Extension::DoRegisterExtensions(extensionListAddr, (registerFunc)); \ + } + +#endif // FRAMEWORK_ALGORITHM_EXTENSION_MANAGER_UTILS_H diff --git a/framework/algorithm/extension_manager/include/video_refreshrate_prediction_extension.h b/framework/algorithm/extension_manager/include/video_refreshrate_prediction_extension.h new file mode 100644 index 0000000000000000000000000000000000000000..c0f4b5b0107d5c689aae87ba26c35d0eca50c883 --- /dev/null +++ b/framework/algorithm/extension_manager/include/video_refreshrate_prediction_extension.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_EXTENSION_VIDEO_REFRESHRATE_PREDICTION_EXTENSION_H +#define FRAMEWORK_ALGORITHM_EXTENSION_VIDEO_REFRESHRATE_PREDICTION_EXTENSION_H + +#include "video_refreshrate_prediction_base.h" +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace Extension { +struct VideoRefreshratePredictionExtension : public ExtensionBase { + VideoRefreshRatePredictionCreator creator; +}; +} // namespace Extension +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_EXTENSION_VIDEO_REFRESHRATE_PREDICTION_EXTENSION_H diff --git a/framework/algorithm/extension_manager/utils.cpp b/framework/algorithm/extension_manager/utils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c28a74aaa33a61283830697300cd17be33335e9b --- /dev/null +++ b/framework/algorithm/extension_manager/utils.cpp @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils.h"
+#include <algorithm>
+#include <iterator>
+#include "vpe_log.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+namespace Extension {
+
+void DoRegisterExtensions(uintptr_t addr, Utils::RegisterExtensionFunc func)
+{
+    CHECK_AND_RETURN_LOG(func, "Register function is invalid");
+    auto extensionList = reinterpret_cast<ExtensionList *>(addr);
+    CHECK_AND_RETURN_LOG(extensionList != nullptr, "Extension list is nullptr");
+    auto extensions = func();
+    std::copy(extensions.begin(), extensions.end(), std::back_inserter(*extensionList));
+
+    VPE_LOGD("%{public}zu extensions loaded", extensions.size());
+    return;
+}
+} // namespace Extension
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
diff --git a/framework/algorithm/metadata_generator/include/metadata_generator_base.h b/framework/algorithm/metadata_generator/include/metadata_generator_base.h
new file mode 100644
index 0000000000000000000000000000000000000000..87a7fd2f44bc4ef2663715e0d17839d88625876d
--- /dev/null
+++ b/framework/algorithm/metadata_generator/include/metadata_generator_base.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_BASE_H
+#define FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_BASE_H
+
+#include <functional>
+#include <memory>
+#include "nocopyable.h"
+#include "frame_info.h"
+#include "vpe_context.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+class MetadataGeneratorBase : public NoCopyable {
+public:
+    virtual ~MetadataGeneratorBase() = default;
+    virtual VPEAlgoErrCode Init(VPEContext context) = 0;
+    virtual VPEAlgoErrCode Deinit() = 0;
+    virtual VPEAlgoErrCode SetParameter(const MetadataGeneratorParameter &parameter) = 0;
+    virtual VPEAlgoErrCode GetParameter(MetadataGeneratorParameter &parameter) = 0;
+    virtual VPEAlgoErrCode Process(const sptr<SurfaceBuffer> &input) = 0;
+};
+
+using MetadataGeneratorCreator = std::function<std::shared_ptr<MetadataGeneratorBase>()>;
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
+
+#endif // FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_BASE_H
diff --git a/framework/algorithm/metadata_generator/include/metadata_generator_capability.h b/framework/algorithm/metadata_generator/include/metadata_generator_capability.h
new file mode 100644
index 0000000000000000000000000000000000000000..a74807a45bb1b2c6b1dac2cc3bc7770f9b21ee84
--- /dev/null
+++ b/framework/algorithm/metadata_generator/include/metadata_generator_capability.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_CAPABILITY_H +#define FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_CAPABILITY_H + +#include +#include +#include "frame_info.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +struct MetadataGeneratorCapability { + ColorSpaceDescription colorspaceDesc; + std::vector pixelFormats; + uint32_t rank; + int32_t version; +}; + +using MetadataGeneratorCapabilitiesBuilder = std::function()>; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_CAPABILITY_H diff --git a/framework/algorithm/metadata_generator/include/metadata_generator_fwk.h b/framework/algorithm/metadata_generator/include/metadata_generator_fwk.h new file mode 100644 index 0000000000000000000000000000000000000000..7ed45db635a96bdaa68a87018dbdf4494c844aba --- /dev/null +++ b/framework/algorithm/metadata_generator/include/metadata_generator_fwk.h @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_FWK_H +#define FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_FWK_H + +#include +#include +#include +#include "metadata_generator.h" +#include "metadata_generator_base.h" +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class MetadataGeneratorFwk : public MetadataGenerator { +public: + MetadataGeneratorFwk(); + MetadataGeneratorFwk(std::shared_ptr openGlContext); + ~MetadataGeneratorFwk(); + VPEAlgoErrCode SetParameter(const MetadataGeneratorParameter ¶meter) override; + VPEAlgoErrCode GetParameter(MetadataGeneratorParameter ¶meter) const override; + VPEAlgoErrCode Process(const sptr &input) override; + +private: + VPEAlgoErrCode Init(const sptr &input); + void OpenGLInit(); + + std::shared_ptr impl_ { nullptr }; + MetadataGeneratorParameter parameter_; + std::atomic initialized_ { false }; + Extension::ExtensionInfo extensionInfo_; + VPEContext context; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_FWK_H diff --git a/framework/algorithm/metadata_generator/metadata_generator_fwk.cpp b/framework/algorithm/metadata_generator/metadata_generator_fwk.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3018ba05fcece4562dfb4e46152cbd6d6f0d869d --- /dev/null +++ b/framework/algorithm/metadata_generator/metadata_generator_fwk.cpp @@ -0,0 +1,185 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "metadata_generator_fwk.h"
+#include "video_processing_client.h"
+#include "extension_manager.h"
+#include "native_buffer.h"
+#include "surface_buffer.h"
+#include "vpe_trace.h"
+#include "vpe_log.h"
+#include "EGL/egl.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+MetadataGeneratorFwk::MetadataGeneratorFwk()
+{
+    OpenGLInit();
+    OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Connect();
+    VPE_LOGI("VPE Framework connect and load SA!");
+    OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Disconnect();
+    Extension::ExtensionManager::GetInstance().IncreaseInstance();
+}
+
+void MetadataGeneratorFwk::OpenGLInit()
+{
+    context.glDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+    if (context.glDisplay == EGL_NO_DISPLAY || eglGetError() != EGL_SUCCESS) {
+        VPE_LOGE("MetadataGeneratorFwk Get display failed!");
+    }
+    EGLint major;
+    EGLint minor;
+    if (eglInitialize(context.glDisplay, &major, &minor) == EGL_FALSE || eglGetError() != EGL_SUCCESS) {
+        VPE_LOGE("MetadataGeneratorFwk eglInitialize failed!");
+    }
+}
+
+MetadataGeneratorFwk::MetadataGeneratorFwk(std::shared_ptr openglContext)
+{
+    if (openglContext != nullptr) {
+        context.glDisplay = openglContext->display;
+    }
+    OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Connect();
+    VPE_LOGI("VPE Framework connect and load SA!");
+    OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Disconnect();
+    Extension::ExtensionManager::GetInstance().IncreaseInstance();
+}
+
+MetadataGeneratorFwk::~MetadataGeneratorFwk()
+{
+    if (impl_) {
+        impl_->Deinit();
+        impl_ = nullptr;
+    }
+    if (context.glDisplay != EGL_NO_DISPLAY) {
+        eglTerminate(context.glDisplay);
+    }
+    Extension::ExtensionManager::GetInstance().DecreaseInstance();
+}
+
+VPEAlgoErrCode MetadataGeneratorFwk::SetParameter(const MetadataGeneratorParameter &parameter)
+{
+    parameter_ = parameter;
+    VPE_LOGI("MetadataGeneratorFwk SetParameter Succeed");
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode MetadataGeneratorFwk::GetParameter(MetadataGeneratorParameter &parameter) const
+{
+    parameter = parameter_;
+    VPE_LOGI("MetadataGeneratorFwk GetParameter Succeed");
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode MetadataGeneratorFwk::Process(const sptr<SurfaceBuffer> &input)
+{
+    CHECK_AND_RETURN_RET_LOG(input != nullptr, VPE_ALGO_ERR_INVALID_VAL, "Input is nullptr");
+    CHECK_AND_RETURN_RET_LOG((input->GetUsage() & (BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER)) != 0,
+        VPE_ALGO_ERR_INVALID_VAL, "Input SurfaceBuffer usage must include BUFFER_USAGE_CPU_WRITE or BUFFER_USAGE_HW_RENDER");
+    VPEAlgoErrCode ret = Init(input);
+    CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Init failed");
+
+    VPE_SYNC_TRACE;
+    ret = impl_->Process(input);
+    CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Process failed, ret: %{public}d", ret);
+
+    return VPE_ALGO_ERR_OK;
+}
+
+VPEAlgoErrCode MetadataGeneratorFwk::Init(const sptr<SurfaceBuffer> &input)
+{
+    if (initialized_) {
+        impl_->SetParameter(parameter_);
+        return VPE_ALGO_ERR_OK;
+    }
+    auto &manager = Extension::ExtensionManager::GetInstance();
+
+    VPE_SYNC_TRACE;
+
+    FrameInfo info(input);
+    impl_ = manager.CreateMetadataGenerator(info, extensionInfo_, parameter_.algoType);
+    CHECK_AND_RETURN_RET_LOG(impl_ != nullptr, VPE_ALGO_ERR_NOT_IMPLEMENTED, "Create failed");
+
+    int32_t ret = impl_->Init(context);
+    CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_NOT_IMPLEMENTED, "Init failed");
+    impl_->SetParameter(parameter_);
+    initialized_ = true;
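+    // Initialization is lazy: the concrete generator implementation is selected on the
+    // first Process() call, based on the FrameInfo of the incoming buffer, and later
+    // calls only refresh the parameters set above.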
VPE_LOGI("MetadataGeneratorFwk Init Succeed"); + + return VPE_ALGO_ERR_OK; +} + +std::shared_ptr MetadataGenerator::Create() +{ + auto p = std::make_shared(); + CHECK_AND_RETURN_RET_LOG(p != nullptr, nullptr, "Create MetadataGenerator failed"); + return std::static_pointer_cast(p); +} + +std::shared_ptr MetadataGenerator::Create(std::shared_ptr openglContext) +{ + auto p = std::make_shared(openglContext); + CHECK_AND_RETURN_RET_LOG(p != nullptr, nullptr, "Create MetadataGenerator failed"); + return std::static_pointer_cast(p); +} + +int32_t MetadataGeneratorCreate(int32_t* instance) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr, VPE_ALGO_ERR_INVALID_VAL, "invalid instance"); + auto p = MetadataGenerator::Create(); + CHECK_AND_RETURN_RET_LOG(p != nullptr, VPE_ALGO_ERR_INVALID_VAL, "cannot create instance"); + Extension::ExtensionManager::InstanceVariableType instanceVar { p }; + int32_t newId = Extension::ExtensionManager::GetInstance().NewInstanceId(instanceVar); + CHECK_AND_RETURN_RET_LOG(newId != -1, VPE_ALGO_ERR_NO_MEMORY, "cannot create more instance"); + *instance = newId; + return VPE_ALGO_ERR_OK; +} +int32_t MetadataGeneratorProcessImage(int32_t instance, OHNativeWindowBuffer* inputImage) +{ + CHECK_AND_RETURN_RET_LOG((inputImage != nullptr), VPE_ALGO_ERR_INVALID_VAL, + "invalid parameters"); + auto someInstance = Extension::ExtensionManager::GetInstance().GetInstance(instance); + CHECK_AND_RETURN_RET_LOG(someInstance != std::nullopt, VPE_ALGO_ERR_INVALID_VAL, "invalid instance"); + + VPEAlgoErrCode ret = VPE_ALGO_ERR_INVALID_VAL; + auto visitFunc = [inputImage, &ret](auto&& var) { + using VarType = std::decay_t; + if constexpr (std::is_same_v>) { + OH_NativeBuffer* inputImageNativeBuffer = nullptr; + CHECK_AND_RETURN_LOG( + (OH_NativeBuffer_FromNativeWindowBuffer(inputImage, &inputImageNativeBuffer) == GSERROR_OK), + "invalid input image"); + sptr inputImageSurfaceBuffer( + SurfaceBuffer::NativeBufferToSurfaceBuffer(inputImageNativeBuffer)); + (void)var->SetParameter({ MetadataGeneratorAlgoType::META_GEN_ALGO_TYPE_IMAGE}); + ret = var->Process(inputImageSurfaceBuffer); + } else { + VPE_LOGE("instance may be miss used"); + } + }; + std::visit(visitFunc, *someInstance); + + return ret; +} + +int32_t MetadataGeneratorDestroy(int32_t* instance) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr, VPE_ALGO_ERR_INVALID_VAL, "instance is null"); + return Extension::ExtensionManager::GetInstance().RemoveInstanceReference(*instance); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/metadata_generator_video/include/metadata_generator_video_impl.h b/framework/algorithm/metadata_generator_video/include/metadata_generator_video_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..cebe5c7416d6a6d086c10784040151f23578a255 --- /dev/null +++ b/framework/algorithm/metadata_generator_video/include/metadata_generator_video_impl.h @@ -0,0 +1,128 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef METADATA_GENERATOR_VIDEO_IMPL_H
+#define METADATA_GENERATOR_VIDEO_IMPL_H
+
+#include
+#include
+#include
+#include
+#include
+#include "metadata_generator_video.h"
+#include "surface.h"
+#include "sync_fence.h"
+#include "metadata_generator_video_common.h"
+#include "metadata_generator.h"
+#include "algorithm_video_common.h"
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+class MetadataGeneratorVideoImpl : public MetadataGeneratorVideo {
+public:
+    MetadataGeneratorVideoImpl();
+    ~MetadataGeneratorVideoImpl();
+    int32_t Init();
+    int32_t Init(std::shared_ptr openglContext);
+    // Northbound (client-facing) interfaces
+    int32_t SetCallback(const std::shared_ptr &callback) override;
+    int32_t SetOutputSurface(sptr<Surface> surface) override;
+    sptr<Surface> CreateInputSurface() override;
+    int32_t Configure() override;
+    int32_t Prepare() override;
+    int32_t Start() override;
+    int32_t Stop() override;
+    int32_t Reset() override;
+    int32_t Release() override;
+    int32_t NotifyEos() override;
+    int32_t ReleaseOutputBuffer(uint32_t index, bool render) override;
+    int32_t Flush() override;
+
+    GSError OnConsumerBufferAvailable();
+    GSError OnProducerBufferReleased();
+private:
+    struct SurfaceBufferWrapper {
+    public:
+        SurfaceBufferWrapper() = default;
+        ~SurfaceBufferWrapper() = default;
+
+        sptr<SurfaceBuffer> memory{nullptr};
+        MdgBufferFlag bufferFlag{MDG_BUFFER_FLAG_NONE};
+        sptr<SyncFence> fence{nullptr};
+        int64_t timestamp;
+    };
+    void InitBuffers();
+    bool WaitProcessing();
+    bool AcquireInputOutputBuffers(
+        std::shared_ptr<SurfaceBufferWrapper> &inputBuffer, std::shared_ptr<SurfaceBufferWrapper> &outputBuffer);
+    void DoTask();
+    void OnTriggered();
+    void Process(std::shared_ptr<SurfaceBufferWrapper> inputBuffer, std::shared_ptr<SurfaceBufferWrapper> outputBuffer);
+    int32_t AttachToNewSurface(sptr<Surface> newSurface);
+    int32_t SetOutputSurfaceConfig(sptr<Surface> surface);
+    int32_t SetOutputSurfaceRunning(sptr<Surface> newSurface);
+    int32_t GetReleaseOutBuffer();
+    std::atomic<VPEAlgoState> state_{VPEAlgoState::UNINITIALIZED};
+    std::shared_ptr cb_{nullptr};
+    std::shared_ptr<MetadataGenerator> csc_{nullptr};
+    std::mutex mutex_;
+    bool getUsage_{false};
+
+    // Task-related members
+    std::mutex mtxTaskDone_;
+    std::condition_variable cvTaskDone_;
+    std::shared_ptr<std::thread> taskThread_{nullptr};
+    std::condition_variable cvTaskStart_;
+    std::mutex mtxTaskStart_;
+    std::atomic<bool> isRunning_{false};
+    std::atomic<bool> isProcessing_{false};
+    std::atomic<bool> isEos_{false};
+
+    // Surface-related members
+    std::queue<std::shared_ptr<SurfaceBufferWrapper>> outputBufferAvilQue_;
+    std::queue<std::shared_ptr<SurfaceBufferWrapper>> inputBufferAvilQue_;
+    std::queue<std::shared_ptr<SurfaceBufferWrapper>> renderBufferAvilQue_;
+    using RenderBufferAvilMapType = std::map<uint32_t, std::shared_ptr<SurfaceBufferWrapper>>;
+    RenderBufferAvilMapType renderBufferAvilMap_;
+    RenderBufferAvilMapType renderBufferMapBak_;
+    RenderBufferAvilMapType outputBufferAvilQueBak_;
+    std::mutex onBqMutex_; // input surface buffer
+    std::mutex renderQueMutex_; // output surface buffer
+    std::mutex surfaceChangeMutex_;
+    std::mutex surfaceChangeMutex2_;
+    sptr<Surface> inputSurface_{nullptr};
+    sptr<Surface> outputSurface_{nullptr};
+    static constexpr size_t MAX_BUFFER_CNT{5};
+    uint32_t outBufferCnt_{MAX_BUFFER_CNT};
+    uint32_t inBufferCnt_{MAX_BUFFER_CNT};
+    static constexpr size_t MAX_SURFACE_SEQUENCE{std::numeric_limits<uint32_t>::max()};
+    uint32_t lastSurfaceSequence_{MAX_SURFACE_SEQUENCE};
+    BufferRequestConfig requestCfg_{};
+    BufferFlushConfig flushCfg_{};
+};
+
+class MetadataGeneratorBufferConsumerListener : public OHOS::IBufferConsumerListener {
+public:
+    explicit MetadataGeneratorBufferConsumerListener(MetadataGeneratorVideoImpl *process) : process_(process) {}
+    void OnBufferAvailable() override;
+
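+    // Bridges the consumer (input) surface to the implementation: when the producer
+    // flushes a buffer, OnBufferAvailable() forwards the event to
+    // MetadataGeneratorVideoImpl::OnConsumerBufferAvailable(), which acquires the buffer
+    // and wakes the processing thread.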
+private: + MetadataGeneratorVideoImpl *process_; +}; + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // METADATA_GENERATOR_VIDEO_IMPL_H diff --git a/framework/algorithm/metadata_generator_video/metadata_generator_video_impl.cpp b/framework/algorithm/metadata_generator_video/metadata_generator_video_impl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7a5f0f39ca46794dbda8a1a970907d81447a6c80 --- /dev/null +++ b/framework/algorithm/metadata_generator_video/metadata_generator_video_impl.cpp @@ -0,0 +1,632 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "metadata_generator_video_impl.h" +#include +#include +#include +#include +#include "vpe_log.h" +#include "algorithm_errors.h" +#include "algorithm_common.h" +#include "vpe_trace.h" +#include "algorithm_utils.h" +#include "securec.h" +#include "extension_manager.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +std::shared_ptr MetadataGeneratorVideo::Create() +{ + std::shared_ptr impl = std::make_shared(); + int32_t ret = impl->Init(); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, nullptr, "failed to init MetadataGeneratorVideoImpl"); + return impl; +} + +std::shared_ptr MetadataGeneratorVideo::Create(std::shared_ptr openglContext) +{ + std::shared_ptr impl = std::make_shared(); + int32_t ret = impl->Init(openglContext); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, nullptr, "failed to init MetadataGeneratorVideoImpl"); + return impl; +} + +MetadataGeneratorVideoImpl::MetadataGeneratorVideoImpl() +{ + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = 32; // 32 内存对齐 + requestCfg_.usage = 0; + requestCfg_.format = 0; + requestCfg_.width = 0; + requestCfg_.height = 0; + flushCfg_.timestamp = 0; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = 0; + flushCfg_.damage.h = 0; +} + +MetadataGeneratorVideoImpl::~MetadataGeneratorVideoImpl() +{ + Release(); +} + +int32_t MetadataGeneratorVideoImpl::Init() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::UNINITIALIZED, VPE_ALGO_ERR_INVALID_STATE, + "Init failed: not in UNINITIALIZED state"); + csc_ = MetadataGenerator::Create(); + CHECK_AND_RETURN_RET_LOG(csc_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "MetadataGenerator Create failed"); + + isRunning_.store(true); + taskThread_ = std::make_shared(&MetadataGeneratorVideoImpl::OnTriggered, this); + CHECK_AND_RETURN_RET_LOG(taskThread_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "Fatal: No memory"); + + state_ = VPEAlgoState::INITIALIZED; + return VPE_ALGO_ERR_OK; +} + +int32_t MetadataGeneratorVideoImpl::Init(std::shared_ptr openglContext) +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::UNINITIALIZED, VPE_ALGO_ERR_INVALID_STATE, + "Init failed: not in UNINITIALIZED state"); + csc_ = MetadataGenerator::Create(openglContext); + CHECK_AND_RETURN_RET_LOG(csc_ != nullptr, VPE_ALGO_ERR_UNKNOWN, 
"MetadataGenerator Create failed"); + + isRunning_.store(true); + taskThread_ = std::make_shared(&MetadataGeneratorVideoImpl::OnTriggered, this); + CHECK_AND_RETURN_RET_LOG(taskThread_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "Fatal: No memory"); + + state_ = VPEAlgoState::INITIALIZED; + return VPE_ALGO_ERR_OK; +} + +int32_t MetadataGeneratorVideoImpl::SetCallback(const std::shared_ptr &callback) +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VPE_ALGO_ERR_INVALID_VAL, "Set callback failed: callback is NULL"); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING, + VPE_ALGO_ERR_INVALID_STATE, "SetCallback failed: not in INITIALIZED or CONFIGURING state"); + cb_ = callback; + state_ = VPEAlgoState::CONFIGURING; + return VPE_ALGO_ERR_OK; +} + +int32_t MetadataGeneratorVideoImpl::AttachToNewSurface(sptr newSurface) +{ + std::lock_guard lockrender(renderQueMutex_); + for (auto it = outputBufferAvilQueBak_.begin(); it != outputBufferAvilQueBak_.end(); ++it) { + auto buffer = it->second; + GSError err = newSurface->AttachBufferToQueue(buffer->memory); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "outputbuffer AttachToNewSurface fail"); + } + return VPE_ALGO_ERR_OK; +} + +int32_t MetadataGeneratorVideoImpl::GetReleaseOutBuffer() +{ + std::lock_guard mapLock(renderQueMutex_); + for (RenderBufferAvilMapType::iterator it = renderBufferMapBak_.begin(); it != renderBufferMapBak_.end(); ++it) { + outputBufferAvilQue_.push(it->second); + } + renderBufferMapBak_.clear(); + return VPE_ALGO_ERR_OK; +} + + +int32_t MetadataGeneratorVideoImpl::SetOutputSurfaceConfig(sptr surface) +{ + GSError err = surface->RegisterReleaseListener([this](sptr &buffer) { + (void)buffer; + return OnProducerBufferReleased(); + }); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "RegisterReleaseListener fail"); + surface->SetQueueSize(outBufferCnt_); + outputSurface_ = surface; + state_ = VPEAlgoState::CONFIGURING; + return VPE_ALGO_ERR_OK; +} + +int32_t MetadataGeneratorVideoImpl::SetOutputSurfaceRunning(sptr newSurface) +{ + std::lock_guard lockSurface(surfaceChangeMutex_); + std::lock_guard lockSurface2(surfaceChangeMutex2_); + uint64_t oldId = outputSurface_->GetUniqueId(); + uint64_t newId = newSurface->GetUniqueId(); + if (oldId == newId) { + VPE_LOGD("SetOutputSurfaceRunning same surface"); + return VPE_ALGO_ERR_OK; + } + + outputSurface_->UnRegisterReleaseListener(); + outputSurface_->CleanCache(true); + GSError err = newSurface->RegisterReleaseListener([this](sptr &buffer) { + (void)buffer; + return OnProducerBufferReleased(); + }); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "RegisterReleaseListener fail"); + newSurface->SetQueueSize(outBufferCnt_); + newSurface->Connect(); + newSurface->CleanCache(); + GetReleaseOutBuffer(); + int32_t ret = AttachToNewSurface(newSurface); + if (ret != VPE_ALGO_ERR_OK) { + return ret; + } + + GraphicTransformType inTransform; + ScalingMode inScaleMode; + inTransform = inputSurface_->GetTransform(); + + outputSurface_ = newSurface; + err = outputSurface_->SetTransform(inTransform); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "SetTransform fail"); + + if (lastSurfaceSequence_ != MAX_SURFACE_SEQUENCE) { + err = inputSurface_->GetScalingMode(lastSurfaceSequence_, inScaleMode); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "GetScalingMode fail"); + err = outputSurface_->SetScalingMode(inScaleMode); + 
CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "SetScalingMode fail"); + } + return VPE_ALGO_ERR_OK; +} + +int32_t MetadataGeneratorVideoImpl::SetOutputSurface(sptr surface) +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VPE_ALGO_ERR_INVALID_VAL, "surface is nullptr"); + CHECK_AND_RETURN_RET_LOG(surface->IsConsumer() == false, VPE_ALGO_ERR_INVALID_VAL, "surface is not producer"); + if (state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING) { + int32_t ret = SetOutputSurfaceConfig(surface); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_INVALID_STATE, "SetOutputSurface config fail"); + } else if (state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS || state_ == VPEAlgoState::FLUSHED) { + int32_t ret = SetOutputSurfaceRunning(surface); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, VPE_ALGO_ERR_INVALID_STATE, "SetOutputSurface Running fail"); + } else { + CHECK_AND_RETURN_RET_LOG(false, VPE_ALGO_ERR_INVALID_STATE, "surface state not support SetOutputSurface"); + } + + return VPE_ALGO_ERR_OK; +} + +sptr MetadataGeneratorVideoImpl::CreateInputSurface() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING, nullptr, + "CreateInputSurface failed: not in INITIALIZED or CONFIGURING state"); + CHECK_AND_RETURN_RET_LOG(inputSurface_ == nullptr, nullptr, "inputSurface already exists"); + + inputSurface_ = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + CHECK_AND_RETURN_RET_LOG(inputSurface_ != nullptr, nullptr, "CreateSurfaceAsConsumer fail"); + sptr listener = new MetadataGeneratorBufferConsumerListener(this); + GSError err = inputSurface_->RegisterConsumerListener(listener); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, nullptr, "RegisterConsumerListener fail"); + + sptr producer = inputSurface_->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + CHECK_AND_RETURN_RET_LOG(producerSurface != nullptr, nullptr, "CreateSurfaceAsProducer fail"); + producerSurface->SetDefaultUsage(BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER | + BUFFER_USAGE_MEM_DMA | BUFFER_USAGE_MEM_MMZ_CACHE); + inputSurface_->SetQueueSize(inBufferCnt_); + state_ = VPEAlgoState::CONFIGURING; + + return producerSurface; +} + +int32_t MetadataGeneratorVideoImpl::Configure() +{ + std::lock_guard lock(mutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::INITIALIZED || state_ == VPEAlgoState::CONFIGURING || state_ == + VPEAlgoState::STOPPED, VPE_ALGO_ERR_INVALID_STATE, "Configure failed: not in INITIALIZED or CONFIGURING state"); + MetadataGeneratorParameter param; + param.algoType = MetadataGeneratorAlgoType::META_GEN_ALGO_TYPE_VIDEO; + int32_t ret = csc_->SetParameter(param); + state_ = (ret == VPE_ALGO_ERR_OK ? 
VPEAlgoState::CONFIGURING : VPEAlgoState::ERROR);
+    return ret;
+}
+
+int32_t MetadataGeneratorVideoImpl::Prepare()
+{
+    std::lock_guard<std::mutex> lock(mutex_);
+    if (state_ == VPEAlgoState::STOPPED) {
+        state_ = VPEAlgoState::CONFIGURED;
+        return VPE_ALGO_ERR_OK;
+    }
+    CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::CONFIGURING, VPE_ALGO_ERR_INVALID_STATE,
+        "Prepare failed: not in CONFIGURING state");
+    CHECK_AND_RETURN_RET_LOG(cb_ != nullptr && inputSurface_ != nullptr && outputSurface_ != nullptr,
+        VPE_ALGO_ERR_INVALID_OPERATION, "Prepare failed: inputSurface or outputSurface or callback is null");
+
+    state_ = VPEAlgoState::CONFIGURED;
+    return VPE_ALGO_ERR_OK;
+}
+
+void MetadataGeneratorVideoImpl::InitBuffers()
+{
+    flushCfg_.damage.x = 0;
+    flushCfg_.damage.y = 0;
+    flushCfg_.damage.w = requestCfg_.width;
+    flushCfg_.damage.h = requestCfg_.height;
+    for (uint32_t i = 0; i < outBufferCnt_; ++i) {
+        std::shared_ptr<SurfaceBufferWrapper> buffer = std::make_shared<SurfaceBufferWrapper>();
+        GSError err = outputSurface_->RequestBuffer(buffer->memory, buffer->fence, requestCfg_);
+        if (err != GSERROR_OK || buffer->memory == nullptr) {
+            VPE_LOGW("RequestBuffer %{public}u failed, GSError=%{public}d", i, err);
+            continue;
+        }
+        outputBufferAvilQue_.push(buffer);
+        outputBufferAvilQueBak_.insert(std::make_pair(buffer->memory->GetSeqNum(), buffer));
+    }
+}
+
+int32_t MetadataGeneratorVideoImpl::Start()
+{
+    std::lock_guard<std::mutex> lock(mutex_);
+    CHECK_AND_RETURN_RET_LOG(
+        (state_ == VPEAlgoState::CONFIGURED || state_ == VPEAlgoState::STOPPED || state_ == VPEAlgoState::FLUSHED),
+        VPE_ALGO_ERR_INVALID_STATE,
+        "Start failed: not in CONFIGURED, STOPPED or FLUSHED state");
+    if (isEos_.load()) {
+        state_ = VPEAlgoState::EOS;
+    } else {
+        state_ = VPEAlgoState::RUNNING;
+    }
+    cb_->OnState(static_cast<int32_t>(state_.load()));
+    cvTaskStart_.notify_all();
+    return VPE_ALGO_ERR_OK;
+}
+
+int32_t MetadataGeneratorVideoImpl::Stop()
+{
+    std::lock_guard<std::mutex> lock(mutex_);
+    CHECK_AND_RETURN_RET_LOG(
+        state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS || state_ == VPEAlgoState::FLUSHED,
+        VPE_ALGO_ERR_INVALID_STATE,
+        "Stop failed: not in RUNNING, EOS or FLUSHED state");
+
+    state_ = VPEAlgoState::STOPPED;
+    if (!isProcessing_) {
+        cb_->OnState(static_cast<int32_t>(state_.load()));
+    }
+    return VPE_ALGO_ERR_OK;
+}
+
+int32_t MetadataGeneratorVideoImpl::Reset()
+{
+    std::lock_guard<std::mutex> lock(mutex_);
+    CHECK_AND_RETURN_RET_LOG(
+        state_ != VPEAlgoState::UNINITIALIZED, VPE_ALGO_ERR_INVALID_STATE, "Reset failed: not in right state");
+    std::unique_lock<std::mutex> lockTask(mtxTaskDone_);
+    state_ = VPEAlgoState::INITIALIZED;
+    cvTaskDone_.wait(lockTask, [this]() { return isProcessing_.load() == false; });
+
+    csc_ = MetadataGenerator::Create();
+    CHECK_AND_RETURN_RET_LOG(csc_ != nullptr, VPE_ALGO_ERR_UNKNOWN, "MetadataGenerator Create failed");
+    isEos_.store(false);
+
+    return VPE_ALGO_ERR_OK;
+}
+
+int32_t MetadataGeneratorVideoImpl::Release()
+{
+    std::lock_guard<std::mutex> lock(mutex_);
+    {
+        std::unique_lock<std::mutex> lockTask(mtxTaskDone_);
+        state_ = VPEAlgoState::UNINITIALIZED;
+        cvTaskDone_.wait(lockTask, [this]() { return isProcessing_.load() == false; });
+
+        inputSurface_ = nullptr;
+        std::unique_lock<std::mutex> lockSurface(surfaceChangeMutex_);
+        std::unique_lock<std::mutex> lockSurface2(surfaceChangeMutex2_);
+        if (outputSurface_ != nullptr) {
+            outputSurface_->UnRegisterReleaseListener();
+            outputSurface_->CleanCache(true);
+            outputSurface_ = nullptr;
+        }
+        lockSurface2.unlock();
+        lockSurface.unlock();
+        cb_ = nullptr;
+        csc_ = nullptr;
+        isRunning_.store(false);
+    }
+    if (taskThread_ != nullptr && taskThread_->joinable()) {
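+        // isRunning_ was cleared above, so this wake-up lets the worker observe the stop
+        // flag, fall out of WaitProcessing()/DoTask(), and exit so that join() below does
+        // not block forever.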
cvTaskStart_.notify_all(); + taskThread_->join(); + } + return VPE_ALGO_ERR_OK; +} + +int32_t MetadataGeneratorVideoImpl::Flush() +{ + std::lock_guard lock(mutex_); + { + std::unique_lock lockTask(mtxTaskDone_); + cvTaskDone_.wait(lockTask, [this]() { return isProcessing_.load() == false; }); + } + + { + std::unique_lock lockInQue(onBqMutex_); + std::queue> tempQueue; + inputBufferAvilQue_.swap(tempQueue); + for (; tempQueue.size() != 0;) { + auto buffer = tempQueue.front(); + tempQueue.pop(); + CHECK_AND_RETURN_RET_LOG(buffer && buffer->memory != nullptr, VPE_ALGO_ERR_UNKNOWN, "Invalid memory"); + GSError err = inputSurface_->ReleaseBuffer(buffer->memory, -1); + CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "Release buffer failed"); + } + } + + std::lock_guard mapLock(renderQueMutex_); + for (auto &[id, buffer] : renderBufferAvilMap_) { + VPE_LOGD("Reclaim buffer %{public}" PRIu64, id); + outputBufferAvilQue_.push(buffer); + } + renderBufferAvilMap_.clear(); + state_ = VPEAlgoState::FLUSHED; + return VPE_ALGO_ERR_OK; +} + +void MetadataGeneratorVideoImpl::Process(std::shared_ptr inputBuffer, + std::shared_ptr outputBuffer) +{ + VPETrace videoTrace("MetadataGeneratorVideoImpl::Process"); + int32_t ret = VPE_ALGO_ERR_EXTENSION_PROCESS_FAILED; + outputBuffer->timestamp = inputBuffer->timestamp; + sptr surfaceInputBuffer = inputBuffer->memory; + sptr surfaceOutputBuffer = outputBuffer->memory; + bool copyRet = AlgorithmUtils::CopySurfaceBufferToSurfaceBuffer(surfaceInputBuffer, surfaceOutputBuffer); + if (!copyRet) { + requestCfg_.width = surfaceInputBuffer->GetWidth(); + requestCfg_.height = surfaceInputBuffer->GetHeight(); + requestCfg_.format = surfaceInputBuffer->GetFormat(); + surfaceOutputBuffer->EraseMetadataKey(ATTRKEY_COLORSPACE_INFO); + surfaceOutputBuffer->EraseMetadataKey(ATTRKEY_HDR_METADATA_TYPE); + if (surfaceOutputBuffer->Alloc(requestCfg_) == GSERROR_OK) { + copyRet = AlgorithmUtils::CopySurfaceBufferToSurfaceBuffer(surfaceInputBuffer, surfaceOutputBuffer); + } + } + if (copyRet) { + VPETrace cscTrace("MetadataGeneratorVideoImpl::csc_->Process"); + ret = csc_->Process(surfaceOutputBuffer); + } + if (ret != 0 && cb_) { + cb_->OnError(ret); + } + inputSurface_->ReleaseBuffer(surfaceInputBuffer, -1); + if (!ret) { + std::unique_lock lockOnBq(renderQueMutex_); + renderBufferAvilMap_.emplace(outputBuffer->memory->GetSeqNum(), outputBuffer); + } else { + std::lock_guard renderLock(renderQueMutex_); + outputBufferAvilQue_.push(outputBuffer); + } + + if (!ret && cb_) { + cb_->OnOutputBufferAvailable(surfaceOutputBuffer->GetSeqNum(), outputBuffer->bufferFlag); + } +} + +bool MetadataGeneratorVideoImpl::WaitProcessing() +{ + if (!isRunning_.load()) { + return false; + } + { + std::unique_lock lock(mtxTaskStart_); + cvTaskStart_.wait(lock, [this]() { + std::lock_guard inQueueLock(onBqMutex_); + std::lock_guard outQueueLock(renderQueMutex_); + return ((inputBufferAvilQue_.size() > 0 && outputBufferAvilQue_.size() > 0) || !isRunning_.load()); + }); + } + + return true; +} + +bool MetadataGeneratorVideoImpl::AcquireInputOutputBuffers(std::shared_ptr& inputBuffer, + std::shared_ptr& outputBuffer) +{ + std::lock_guard lockOnBq(onBqMutex_); + std::lock_guard mapLock(renderQueMutex_); + if (inputBufferAvilQue_.size() == 0 || outputBufferAvilQue_.size() == 0) { + if (state_ == VPEAlgoState::STOPPED) { + cb_->OnState(static_cast(state_.load())); + } + return false; + } + inputBuffer = inputBufferAvilQue_.front(); + outputBuffer = outputBufferAvilQue_.front(); + 
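+    // Both queues were verified non-empty above (under onBqMutex_ and renderQueMutex_),
+    // so each call pairs exactly one input buffer with one output buffer.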
inputBufferAvilQue_.pop(); + outputBufferAvilQue_.pop(); + return inputBuffer && outputBuffer; +} + +void MetadataGeneratorVideoImpl::DoTask() +{ + std::shared_ptr inputBuffer = nullptr; + std::shared_ptr outputBuffer = nullptr; + while (true) { + std::lock_guard lockTask(mtxTaskDone_); + if (!isRunning_.load()) { + return; + } + isProcessing_.store(true); + + if (!AcquireInputOutputBuffers(inputBuffer, outputBuffer)) { + break; + } + if (inputBuffer->bufferFlag == MDG_BUFFER_FLAG_EOS) { + { + std::unique_lock lockOnBq(renderQueMutex_); + renderBufferAvilMap_.emplace(outputBuffer->memory->GetSeqNum(), outputBuffer); + } + if (cb_) { + cb_->OnOutputBufferAvailable(outputBuffer->memory->GetSeqNum(), MDG_BUFFER_FLAG_EOS); + } + break; + } + Process(inputBuffer, outputBuffer); + } + isProcessing_.store(false); + cvTaskDone_.notify_all(); +} + +void MetadataGeneratorVideoImpl::OnTriggered() +{ + while (true) { + if (!WaitProcessing()) { + break; + } + + DoTask(); + } +} + +int32_t MetadataGeneratorVideoImpl::ReleaseOutputBuffer(uint32_t index, bool render) +{ + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS, VPE_ALGO_ERR_INVALID_STATE, + "ReleaseOutputBuffer failed: not in RUNNING or EOS state"); + + std::unique_lock lockRenderQue(renderQueMutex_); + auto search = renderBufferAvilMap_.find(index); + if (search == renderBufferAvilMap_.end()) { + VPE_LOGE("ReleaseOutputBuffer invalid index %{public}d ", index); + return VPE_ALGO_ERR_INVALID_PARAM; + } + auto buffer = search->second; + renderBufferAvilMap_.erase(search); + lockRenderQue.unlock(); + + if (render) { + flushCfg_.timestamp = buffer->timestamp; + { + std::lock_guard lockSurface(surfaceChangeMutex_); + CHECK_AND_RETURN_RET_LOG(outputSurface_ != nullptr, GSERROR_OK, "outputSurface_ is nullptr"); + auto ret = outputSurface_->FlushBuffer(buffer->memory, -1, flushCfg_); + if (ret != 0) { + VPE_LOGE("ReleaseOutputBuffer flushbuffer err %{public}d ", (int)ret); + return VPE_ALGO_ERR_UNKNOWN; + } + } + std::lock_guard renderLock(renderQueMutex_); + renderBufferMapBak_.emplace(buffer->memory->GetSeqNum(), buffer); + } else { + std::lock_guard renderLock(renderQueMutex_); + outputBufferAvilQue_.push(buffer); + } + return VPE_ALGO_ERR_OK; +} + +int32_t MetadataGeneratorVideoImpl::NotifyEos() +{ + std::lock_guard lock(mutex_); + std::lock_guard lockOnBq(onBqMutex_); + CHECK_AND_RETURN_RET_LOG(state_ == VPEAlgoState::RUNNING, VPE_ALGO_ERR_INVALID_STATE, + "NotifyEos failed: not in RUNNING state"); + state_ = VPEAlgoState::EOS; + isEos_.store(true); + std::shared_ptr buf = std::make_shared(); + buf->bufferFlag = MDG_BUFFER_FLAG_EOS; + inputBufferAvilQue_.push(buf); + + cvTaskStart_.notify_all(); + + return VPE_ALGO_ERR_OK; +} + +GSError MetadataGeneratorVideoImpl::OnProducerBufferReleased() +{ + { + std::unique_lock lockSurface(surfaceChangeMutex2_); + std::lock_guard outQueLock(renderQueMutex_); + std::shared_ptr buf = std::make_shared(); + CHECK_AND_RETURN_RET_LOG(outputSurface_ != nullptr, GSERROR_OK, "outputSurface_ is nullptr"); + if (renderBufferMapBak_.empty()) { + return GSERROR_OK; + } + GSError err = outputSurface_->RequestBuffer(buf->memory, buf->fence, requestCfg_); + if (err != GSERROR_OK || buf->memory == nullptr) { + VPE_LOGE("RequestBuffer failed, GSError=%{public}d", err); + return err; + } + lockSurface.unlock(); + outputBufferAvilQue_.push(buf); + auto bufSeqNum = buf->memory->GetSeqNum(); + lastSurfaceSequence_ = bufSeqNum; + renderBufferMapBak_.erase(bufSeqNum); + auto it = 
outputBufferAvilQueBak_.find(bufSeqNum); + if (it == outputBufferAvilQueBak_.end()) { + outputBufferAvilQueBak_.insert(std::make_pair(bufSeqNum, buf)); + auto firstSeqNum = renderBufferMapBak_.begin(); + if (firstSeqNum != renderBufferMapBak_.end()) { + outputBufferAvilQueBak_.erase(firstSeqNum->first); + renderBufferMapBak_.erase(firstSeqNum->first); + } + } + } + + if (state_ == VPEAlgoState::RUNNING || state_ == VPEAlgoState::EOS) { + cvTaskStart_.notify_all(); + } + + return GSERROR_OK; +} + +GSError MetadataGeneratorVideoImpl::OnConsumerBufferAvailable() +{ + std::lock_guard lock(mutex_); + std::lock_guard lockInQue(onBqMutex_); + CHECK_AND_RETURN_RET_LOG(inputSurface_ != nullptr, GSERROR_OK, "inputSurface is nullptr"); + CHECK_AND_RETURN_RET_LOG(state_ != VPEAlgoState::STOPPED, GSERROR_OK, "state change to stop"); + std::shared_ptr buffer = std::make_shared(); + OHOS::Rect damage; + GSError err = inputSurface_->AcquireBuffer(buffer->memory, buffer->fence, buffer->timestamp, damage); + if (err != GSERROR_OK || buffer->memory == nullptr) { + VPE_LOGW("AcquireBuffer failed, GSError=%{public}d", err); + return err; + } + inputBufferAvilQue_.push(buffer); + + if (!getUsage_) { + requestCfg_.usage = (buffer->memory->GetUsage() | requestCfg_.usage); + getUsage_ = true; + requestCfg_.width = buffer->memory->GetWidth(); + requestCfg_.height = buffer->memory->GetHeight(); + requestCfg_.format = buffer->memory->GetFormat(); + InitBuffers(); + } + + if (state_ == VPEAlgoState::RUNNING) { + cvTaskStart_.notify_all(); + } + + return GSERROR_OK; +} + +void MetadataGeneratorBufferConsumerListener::OnBufferAvailable() +{ + if (process_ != nullptr) { + process_->OnConsumerBufferAvailable(); + } +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/algorithm/video_variable_refresh_rate/include/video_refreshrate_prediction_base.h b/framework/algorithm/video_variable_refresh_rate/include/video_refreshrate_prediction_base.h new file mode 100644 index 0000000000000000000000000000000000000000..9be8a4ce049e73c8176715ab1969f1a0dd89eb29 --- /dev/null +++ b/framework/algorithm/video_variable_refresh_rate/include/video_refreshrate_prediction_base.h @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef FRAMEWORK_ALGORITHM_VIDEO_REFRESHRATE_PREDICTION_BASE_H +#define FRAMEWORK_ALGORITHM_VIDEO_REFRESHRATE_PREDICTION_BASE_H + +#include +#include +#include "nocopyable.h" +#include "frame_info.h" + + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class VideoRefreshRatePredictionBase : public NoCopyable { +public: + virtual ~VideoRefreshRatePredictionBase() = default; + virtual VPEAlgoErrCode CheckVRRSupport(std::string processName) = 0; + virtual VPEAlgoErrCode Process(const sptr &input, int videoFps, int codecType) = 0; +}; + +using VideoRefreshRatePredictionCreator = std::function()>; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_VIDEO_REFRESHRATE_PREDICTION_BASE_H diff --git a/framework/algorithm/video_variable_refresh_rate/include/video_refreshrate_prediction_fwk.h b/framework/algorithm/video_variable_refresh_rate/include/video_refreshrate_prediction_fwk.h new file mode 100644 index 0000000000000000000000000000000000000000..f9e998f3ea3f45b9e6516f4210611b97be68e10f --- /dev/null +++ b/framework/algorithm/video_variable_refresh_rate/include/video_refreshrate_prediction_fwk.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_ALGORITHM_VIDEO_REFRESHRATE_PREDICTION_FWK_H +#define FRAMEWORK_ALGORITHM_VIDEO_REFRESHRATE_PREDICTION_FWK_H + +#include +#include +#include +#include "video_refreshrate_prediction.h" +#include "video_refreshrate_prediction_base.h" +#include "extension_base.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class VideoRefreshRatePredictionFwk : public VideoRefreshRatePrediction { +public: + VideoRefreshRatePredictionFwk(); + ~VideoRefreshRatePredictionFwk(); + VPEAlgoErrCode CheckVRRSupport(std::string processName) override; + VPEAlgoErrCode Process(const sptr &input, int videoFps, int codecType) override; + +private: + VPEAlgoErrCode Init(); + std::shared_ptr impl_ { nullptr }; + std::atomic initialized_ { false }; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // FRAMEWORK_ALGORITHM_METADATA_GENERATOR_METADATA_GENERATOR_FWK_H diff --git a/framework/algorithm/video_variable_refresh_rate/video_refreshrate_prediction_fwk.cpp b/framework/algorithm/video_variable_refresh_rate/video_refreshrate_prediction_fwk.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3893fcdbe8cbdffd849bf75f60cd7ab1e5600530 --- /dev/null +++ b/framework/algorithm/video_variable_refresh_rate/video_refreshrate_prediction_fwk.cpp @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "video_refreshrate_prediction_fwk.h" +#include "extension_manager.h" +#include "surface_buffer.h" +#include "vpe_trace.h" +#include "vpe_log.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +VideoRefreshRatePredictionFwk::VideoRefreshRatePredictionFwk() +{ + Extension::ExtensionManager::GetInstance().IncreaseInstance(); +} + +VideoRefreshRatePredictionFwk::~VideoRefreshRatePredictionFwk() +{ + impl_ = nullptr; + Extension::ExtensionManager::GetInstance().DecreaseInstance(); +} + +VPEAlgoErrCode VideoRefreshRatePredictionFwk::Process(const sptr<SurfaceBuffer> &input, int videoFps, int codecType) +{ + VPEAlgoErrCode ret = Init(); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "VRR Init failed"); + ret = impl_->Process(input, videoFps, codecType); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Process failed, ret: %{public}d", ret); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode VideoRefreshRatePredictionFwk::CheckVRRSupport(std::string processName) +{ + VPEAlgoErrCode ret = Init(); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "VRR Init failed"); + ret = impl_->CheckVRRSupport(processName); + return ret; +} + +VPEAlgoErrCode VideoRefreshRatePredictionFwk::Init() +{ + if (initialized_) { + return VPE_ALGO_ERR_OK; + } + auto& manager = Extension::ExtensionManager::GetInstance(); + VPE_SYNC_TRACE; + impl_ = manager.CreateVideoRefreshRatePredictor(); + CHECK_AND_RETURN_RET_LOG(impl_ != nullptr, VPE_ALGO_ERR_NOT_IMPLEMENTED, "Create impl failed"); + initialized_ = true; + VPE_LOGI("create VideoRefreshRatePredictionFwk succeeded"); + return VPE_ALGO_ERR_OK; +} + + +std::shared_ptr<VideoRefreshRatePrediction> VideoRefreshRatePrediction::Create() +{ + auto p = std::make_shared<VideoRefreshRatePredictionFwk>(); + CHECK_AND_RETURN_RET_LOG(p != nullptr, nullptr, "Create VideoRefreshRatePrediction failed"); + return std::static_pointer_cast<VideoRefreshRatePrediction>(p); +} + +struct VideoRefreshRatePredictionHandleImpl { + std::shared_ptr<VideoRefreshRatePrediction> obj; +}; + +VideoRefreshRatePredictionHandle *VideoRefreshRatePredictionCreate() +{ + auto impl = std::make_shared<VideoRefreshRatePredictionFwk>(); + CHECK_AND_RETURN_RET_LOG(impl != nullptr, nullptr, "Create VideoRefreshRatePrediction failed"); + auto handle = new VideoRefreshRatePredictionHandleImpl; + handle->obj = impl; + return static_cast<VideoRefreshRatePredictionHandle *>(handle); +} + +void VideoRefreshRatePredictionDestroy(VideoRefreshRatePredictionHandle *handle) +{ + VPE_LOGD("VideoRefreshRatePredictionFwk Destroy"); + if (handle != nullptr) { + auto p = static_cast<VideoRefreshRatePredictionHandleImpl *>(handle); + delete p; + } +} + +int32_t VideoRefreshRatePredictionCheckSupport(VideoRefreshRatePredictionHandle *handle, const char *processName) +{ + auto p = static_cast<VideoRefreshRatePredictionHandleImpl *>(handle); + int32_t ret = p->obj->CheckVRRSupport(processName); + return ret; +} + +void VideoRefreshRatePredictionProcess(VideoRefreshRatePredictionHandle *handle, + OH_NativeBuffer* inputImageNativeBuffer, int videoFps, int codecType) +{ + auto p = static_cast<VideoRefreshRatePredictionHandleImpl *>(handle); + sptr<SurfaceBuffer> inputImageSurfaceBuffer(SurfaceBuffer::NativeBufferToSurfaceBuffer(inputImageNativeBuffer)); + p->obj->Process(inputImageSurfaceBuffer, videoFps, codecType); +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // 
namespace OHOS diff --git a/framework/capi/config.gni b/framework/capi/config.gni new file mode 100644 index 0000000000000000000000000000000000000000..3ec27a0f8abdf64c251835afce2b9d2d3115eccc --- /dev/null +++ b/framework/capi/config.gni @@ -0,0 +1,16 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//foundation/multimedia/video_processing_engine/config.gni") + +VIDEO_PROCESSING_ENGINE_NDK_DIR = "//foundation/multimedia/media_foundation/video_processing_engine" diff --git a/framework/capi/image_processing/colorspace_converter/colorspace_converter_image_native.cpp b/framework/capi/image_processing/colorspace_converter/colorspace_converter_image_native.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a5f859bc31eca4b75ea41a840b4ec7fffc238d60 --- /dev/null +++ b/framework/capi/image_processing/colorspace_converter/colorspace_converter_image_native.cpp @@ -0,0 +1,233 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "colorspace_converter_image_native.h" + +#include + +#include "image_processing_capi_impl.h" +#include "detail_enhancer_common.h" +#include "detail_enhancer_image_fwk.h" +#include "image_processing_utils.h" +#include "surface_buffer.h" +#include "surface_buffer_impl.h" +#include "surface_type.h" +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::InitializeInner() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + colorspaceConverter_ = ColorSpaceConverter::Create(nullptr, opengclContext_); + CHECK_AND_RETURN_RET_LOG(colorspaceConverter_ != nullptr, IMAGE_PROCESSING_ERROR_CREATE_FAILED, + "Create detail colorspaceConverter failed!"); + isInitialized_ = true; + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::DeinitializeInner() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + colorspaceConverter_ = nullptr; + isInitialized_ = false; + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::SetParameter(const OHOS::Media::Format& parameter) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::GetParameter(OHOS::Media::Format& parameter) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::Process( + const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} +CM_ColorSpaceInfo GetColorSpaceInfo(const uint32_t colorSpaceType) +{ + CM_ColorSpaceInfo info; + info.primaries = static_cast((colorSpaceType & COLORPRIMARIES_MASK) >> COLORPRIMARIES_OFFSET); + info.transfunc = static_cast((colorSpaceType & TRANSFUNC_MASK) >> TRANSFUNC_OFFSET); + info.matrix = static_cast((colorSpaceType & MATRIX_MASK) >> MATRIX_OFFSET); + info.range = static_cast((colorSpaceType & RANGE_MASK) >> RANGE_OFFSET); + return info; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::ConvertColorSpace( + const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) +{ + ColorSpaceConverterParameter parameter; + parameter.renderIntent = RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC; + auto ret = ImageProcessingUtils::InnerErrorToNDK(colorspaceConverter_->SetParameter(parameter)); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "cSetParameter failed!"); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED, + "ConvertColorSpace image is not initialized!"); + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "sourceImage or destinationImage is null!"); + sptr sourceImageSurfaceBuffer = nullptr; + sptr destinationImageSurfaceBuffer = nullptr; + ret = ConvertPixelmapToSurfaceBuffer(sourceImage, sourceImageSurfaceBuffer); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "convert to surface buffer failed!"); + ret = ConvertPixelmapToSurfaceBuffer(destinationImage, destinationImageSurfaceBuffer); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "convert to surface buffer failed!"); + ret = 
ImageProcessingUtils::InnerErrorToNDK( + colorspaceConverter_->Process(sourceImageSurfaceBuffer, destinationImageSurfaceBuffer)); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "process failed!"); + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::Compose( + const std::shared_ptr& sourceImage, + const std::shared_ptr& sourceGainmap, + std::shared_ptr& destinationImage) +{ + ColorSpaceConverterParameter parameter; + parameter.renderIntent = RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC; + auto ret = ImageProcessingUtils::InnerErrorToNDK(colorspaceConverter_->SetParameter(parameter)); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "cSetParameter failed!"); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED, + "Compose image is not initialized!"); + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr && sourceGainmap != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "sourceImage or destinationImage is null!"); + sptr sourceImageSurfaceBuffer = nullptr; + sptr sourceGainmapSurfaceBuffer = nullptr; + sptr destinationImageSurfaceBuffer = nullptr; + ret = ConvertPixelmapToSurfaceBuffer(sourceImage, sourceImageSurfaceBuffer); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "convert to surface buffer failed!"); + ret = ConvertPixelmapToSurfaceBuffer(sourceGainmap, sourceGainmapSurfaceBuffer); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "convert to surface buffer failed!"); + ret = ConvertPixelmapToSurfaceBuffer(destinationImage, destinationImageSurfaceBuffer); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "convert to surface buffer failed!"); + ret = ImageProcessingUtils::InnerErrorToNDK( + colorspaceConverter_->ComposeImage(sourceImageSurfaceBuffer, sourceGainmapSurfaceBuffer, + destinationImageSurfaceBuffer, false)); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "process failed!"); + return ret; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::Decompose( + const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage, + std::shared_ptr& destinationGainmap) +{ + ColorSpaceConverterParameter parameter; + parameter.renderIntent = RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC; + auto ret = ImageProcessingUtils::InnerErrorToNDK(colorspaceConverter_->SetParameter(parameter)); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "cSetParameter failed!"); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED, + "Decompose image is not initialized!"); + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr && destinationGainmap != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "sourceImage or destinationImage is null!"); + sptr sourceImageSurfaceBuffer = nullptr; + sptr destinationImageSurfaceBuffer = nullptr; + sptr destinationGainmapSurfaceBuffer = nullptr; + ret = ConvertPixelmapToSurfaceBuffer(sourceImage, sourceImageSurfaceBuffer); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "convert to surface buffer failed!"); + ret = ConvertPixelmapToSurfaceBuffer(destinationImage, destinationImageSurfaceBuffer); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "convert to surface buffer failed!"); + ret = ConvertPixelmapToSurfaceBuffer(destinationGainmap, destinationGainmapSurfaceBuffer); + CHECK_AND_RETURN_RET_LOG(ret == 
IMAGE_PROCESSING_SUCCESS, ret, "convert to surface buffer failed!"); + ret = ImageProcessingUtils::InnerErrorToNDK( + colorspaceConverter_->DecomposeImage(sourceImageSurfaceBuffer, destinationImageSurfaceBuffer, + destinationGainmapSurfaceBuffer)); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "process failed!"); + return ret; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::GenerateMetadata( + const std::shared_ptr& sourceImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::EnhanceDetail( + const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} +// LCOV_EXCL_START +static CM_ColorSpaceType ConvertColorSpaceType(ColorManager::ColorSpaceName colorSpace, bool base) +{ + switch (colorSpace) { + case ColorManager::ColorSpaceName::SRGB : + return CM_SRGB_FULL; + case ColorManager::ColorSpaceName::SRGB_LIMIT : + return CM_SRGB_LIMIT; + case ColorManager::ColorSpaceName::DISPLAY_P3 : + return CM_P3_FULL; + case ColorManager::ColorSpaceName::DISPLAY_P3_LIMIT : + return CM_P3_LIMIT; + case ColorManager::ColorSpaceName::BT2020 : + case ColorManager::ColorSpaceName::BT2020_HLG : + return CM_BT2020_HLG_FULL; + case ColorManager::ColorSpaceName::BT2020_HLG_LIMIT : + return CM_BT2020_HLG_LIMIT; + case ColorManager::ColorSpaceName::BT2020_PQ : + return CM_BT2020_PQ_FULL; + case ColorManager::ColorSpaceName::BT2020_PQ_LIMIT : + return CM_BT2020_PQ_LIMIT; + default: + return base ? CM_SRGB_FULL : CM_BT2020_HLG_FULL; + } + return base ? CM_SRGB_FULL : CM_BT2020_HLG_FULL; +} + +ImageProcessing_ErrorCode ColorspaceConverterImageNative::ConvertPixelmapToSurfaceBuffer( + const std::shared_ptr& pixelmap, sptr& bufferImpl) +{ + auto it = IMAGE_FORMAT_MAP.find(pixelmap->GetPixelFormat()); + CHECK_AND_RETURN_RET_LOG(it != IMAGE_FORMAT_MAP.end(), IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "unsupported format: %{public}d", pixelmap->GetPixelFormat()); + CHECK_AND_RETURN_RET_LOG(pixelmap->GetAllocatorType() == AllocatorType::DMA_ALLOC, + IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, "GetAllocatorType: %{public}d", pixelmap->GetAllocatorType()); + bufferImpl = reinterpret_cast(pixelmap->GetFd()); + CHECK_AND_RETURN_RET_LOG(bufferImpl != nullptr, IMAGE_PROCESSING_ERROR_PROCESS_FAILED, + "bufferImpl is nullptr"); + auto colorspace = ConvertColorSpaceType(pixelmap->InnerGetGrColorSpace().GetColorSpaceName(), true); + auto colorspaceinfo = GetColorSpaceInfo(colorspace); + VPE_LOGD("colorspace : %{public}d", colorspace); + std::vector colorSpaceInfoVec; + colorSpaceInfoVec.resize(sizeof(CM_ColorSpaceInfo)); + auto ret = memcpy_s(colorSpaceInfoVec.data(), colorSpaceInfoVec.size(), &colorspaceinfo, + sizeof(CM_ColorSpaceInfo)); + CHECK_AND_RETURN_RET_LOG(ret == EOK, IMAGE_PROCESSING_ERROR_PROCESS_FAILED, + "memcpy_s, err: %{public}d", ret); + auto err = bufferImpl->SetMetadata(ATTRKEY_COLORSPACE_INFO, colorSpaceInfoVec); + CHECK_AND_RETURN_RET_LOG(GSERROR_OK == err, IMAGE_PROCESSING_ERROR_PROCESS_FAILED, + "Get hdr metadata type failed, err: %{public}d", err); + return IMAGE_PROCESSING_SUCCESS; +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/framework/capi/image_processing/colorspace_converter/include/colorspace_converter_image_native.h b/framework/capi/image_processing/colorspace_converter/include/colorspace_converter_image_native.h new file mode 100644 index 
0000000000000000000000000000000000000000..1aff1c9d7530d044c996e80d12c0ebab1cac6339 --- /dev/null +++ b/framework/capi/image_processing/colorspace_converter/include/colorspace_converter_image_native.h @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef COLORSPACE_CONVERTER_IMAGE_NATIVE_H +#define COLORSPACE_CONVERTER_IMAGE_NATIVE_H + +#include +#include +#include + +#include "image_processing_native_template.h" +#include "image_processing_types.h" +#include "pixelmap_native_impl.h" + +#include "detail_enhancer_image.h" +#include "colorspace_converter.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +class ColorspaceConverterImageNative : public ImageProcessingNativeTemplate { +public: + DEFINE_WITH_DISALLOW_COPY_AND_MOVE(ColorspaceConverterImageNative); + + ImageProcessing_ErrorCode InitializeInner() override; + ImageProcessing_ErrorCode DeinitializeInner() override; + ImageProcessing_ErrorCode SetParameter(const OHOS::Media::Format& parameter) override; + ImageProcessing_ErrorCode GetParameter(OHOS::Media::Format& parameter) override; + ImageProcessing_ErrorCode Process(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) override; + ImageProcessing_ErrorCode ConvertColorSpace(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) override; + ImageProcessing_ErrorCode Compose(const std::shared_ptr& sourceImage, + const std::shared_ptr& sourceGainmap, + std::shared_ptr& destinationImage) override; + ImageProcessing_ErrorCode Decompose(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage, + std::shared_ptr& destinationGainmap) override; + ImageProcessing_ErrorCode GenerateMetadata(const std::shared_ptr& sourceImage) override; + ImageProcessing_ErrorCode EnhanceDetail(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) override; +private: + ImageProcessing_ErrorCode CheckParameter(); + ImageProcessing_ErrorCode ConvertPixelmapToSurfaceBuffer(const std::shared_ptr& pixelmap, + sptr& bufferImpl); + ImageProcessing_ErrorCode ConvertSurfaceBufferToPixelmap(sptr buffer, + std::shared_ptr& pixelmap); + int LevelTransfer(int level, const std::unordered_map levelMap) const; + + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::shared_ptr colorspaceConverter_{}; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // COLORSPACE_CONVERTER_IMAGE_NATIVE_H \ No newline at end of file diff --git a/framework/capi/image_processing/detail_enhance_napi.cpp b/framework/capi/image_processing/detail_enhance_napi.cpp new file mode 100644 index 0000000000000000000000000000000000000000..bb7e145a74be21b60b5a101976a36e5c6ebd6ef6 --- /dev/null +++ b/framework/capi/image_processing/detail_enhance_napi.cpp @@ -0,0 +1,331 @@ +/* + * Copyright (c) 2022 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#undef LOG_DOMAIN +#define LOG_DOMAIN LOG_TAG_DOMAIN_ID_IMAGE +#undef LOG_TAG +#define LOG_TAG "DetailEnhanceNapi" + +#include "detail_enhance_napi.h" + +#include + +#include "image_log.h" +#include "image_trace.h" +#include "image_utils.h" +#include "log_tags.h" +#include "media_errors.h" +#include "memory_manager.h" +#include "native_avformat.h" +#include "pixelmap_native_impl.h" +#include "pixelmap_native.h" +#include "vpe_utils.h" + +#if defined(__OHOS__) + #include "hitrace_meter.h" + #define ATRACE_CALL() HITRACE_METER_NAME(HITRACE_TAG_GRAPHIC_AGP, __func__) + #define ATRACE_BEGIN(name) StartTrace(HITRACE_TAG_GRAPHIC_AGP, name) + #define ATRACE_END() FinishTrace(HITRACE_TAG_GRAPHIC_AGP) + #define ATRACE_INT(name, value) CountTrace(HITRACE_TAG_GRAPHIC_AGP, name, value) +#endif + +namespace { + constexpr uint32_t NUM_0 = 0; + constexpr uint32_t NUM_1 = 1; + constexpr uint32_t NUM_2 = 2; + constexpr uint32_t NUM_3 = 3; +} + +namespace OHOS { +namespace Media { +using namespace VideoProcessingEngine; +ImageType DetailEnhanceNapi::ParserImageType(napi_env env, napi_value argv) +{ + napi_value constructor = nullptr; + napi_value global = nullptr; + napi_status ret = napi_invalid_arg; + napi_get_global(env, &global); + ret = napi_get_named_property(env, global, "PixelMap", &constructor); + if (ret != napi_ok) { + IMAGE_LOGI("Get DetailEnhanceNapi property failed!"); + } + bool isInstance = false; + ret = napi_instanceof(env, argv, constructor, &isInstance); + if (ret == napi_ok && isInstance) { + return ImageType::TYPE_PIXEL_MAP; + } + return ImageType::TYPE_UNKNOWN; +} + +bool DetailEnhanceNapi::PrepareNapiEnv(napi_env env, napi_callback_info info, struct NapiValues* nVal) +{ + napi_get_undefined(env, &(nVal->result)); + nVal->status = napi_get_cb_info(env, info, &(nVal->argc), nVal->argv, &(nVal->thisVar), nullptr); + if (nVal->status != napi_ok) { + IMAGE_LOGE("fail to napi_get_cb_info"); + return false; + } + nVal->context = std::make_unique(); + return true; +} + + std::shared_ptr DetailEnhancerImageCreate() + { + auto detailEnh = DetailEnhancerImage::Create(); + if (detailEnh == nullptr) { + return nullptr; + } + return detailEnh; + } + +napi_value DetailEnhanceNapi::Init(napi_env env, napi_callback_info info) +{ + ATRACE_BEGIN("DetailEnhanceNapi::OH_ImageProcessing_Create"); + std::lock_guard lock(lock_); + napi_value result; + if (mDetailEnh != nullptr) { + napi_get_boolean(env, true, &result); + return result; + } + mDetailEnh = DetailEnhancerImageCreate(); + if (mDetailEnh == nullptr) { + IMAGE_LOGE("mDetailEnh == nullptr"); + return nullptr; + } + DetailEnhancerParameters param { + .uri = "", + .level = static_cast(DETAIL_ENH_LEVEL_HIGH), + .forceEve = 1, + }; + if (mDetailEnh->SetParameter(param)!= VPE_ALGO_ERR_OK) { + printf("Init failed!"); + napi_get_boolean(env, false, &result); + return result; + } + ATRACE_END(); + napi_get_boolean(env, true, &result); + return result; +} + +napi_value 
DetailEnhanceNapi::Destroy(napi_env env, napi_callback_info info) +{ + std::lock_guard lock(lock_); + ATRACE_BEGIN("DetailEnhanceNapi::Destroy"); + napi_value result; + if (mDetailEnh != nullptr) { + mDetailEnh = nullptr; + } + ATRACE_END(); + napi_get_boolean(env, true, &result); + return result; +} + +void DetailEnhanceNapi::SetDstPixelMapInfo(PixelMap &source, void* dstPixels, uint32_t dstPixelsSize, + std::unique_ptr& memory, PixelMap &dstPixelMap) +{ + AllocatorType sourceType = source.GetAllocatorType(); + if (sourceType != AllocatorType::DMA_ALLOC) { + IMAGE_LOGW("only support DMA"); + return; + } + if (memory == nullptr) { + IMAGE_LOGW("Invalid memory"); + return; + } + dstPixelMap.SetPixelsAddr(dstPixels, memory->extend.data, memory->data.size, sourceType, nullptr); + if (source.GetAllocatorType() == AllocatorType::DMA_ALLOC && source.IsHdr()) { + sptr sourceSurfaceBuffer(reinterpret_cast (source.GetFd())); + sptr dstSurfaceBuffer(reinterpret_cast (dstPixelMap.GetFd())); + VpeUtils::CopySurfaceBufferInfo(sourceSurfaceBuffer, dstSurfaceBuffer); + } +#ifdef IMAGE_COLORSPACE_FLAG + OHOS::ColorManager::ColorSpace colorspace = source.InnerGetGrColorSpace(); + dstPixelMap.InnerSetColorSpace(colorspace); +#endif +} + +bool DetailEnhanceNapi::AllocMemory(PixelMap &source, PixelMap &dstPixelMap) +{ + if (source.GetPixels() == nullptr || source.GetAllocatorType() != AllocatorType::DMA_ALLOC) { + IMAGE_LOGE("source pixelMap data invalid"); + return false; + } + int32_t bufferSize = source.GetByteCount(); + if (bufferSize <= 0) { + IMAGE_LOGE("CopyPixelMap parameter bufferSize:[%{public}d] error.", bufferSize); + return false; + } + ImageInfo dstImageInfo; + dstPixelMap.GetImageInfo(dstImageInfo); + MemoryData memoryData = {nullptr, static_cast(bufferSize), "Copy ImageData", + dstImageInfo.size, dstImageInfo.pixelFormat}; + std::unique_ptr memory = MemoryManager::CreateMemory(source.GetAllocatorType(), memoryData); + if (memory == nullptr) { + IMAGE_LOGE("invalid memory"); + return false; + } + void *dstPixels = memory->data.data; + if (dstPixels == nullptr) { + IMAGE_LOGE("source crop allocate memory fail allocatetype: %{public}d ", source.GetAllocatorType()); + return false; + } + SetDstPixelMapInfo(source, dstPixels, static_cast(bufferSize), memory, dstPixelMap); + return true; +} + +std::unique_ptr DetailEnhanceNapi::CreateDstPixelMap(PixelMap &source, const InitializationOptions &opts) +{ + std::unique_ptr dstPixelMap = std::make_unique(); + if (dstPixelMap == nullptr) { + IMAGE_LOGE("create pixelmap pointer fail"); + return nullptr; + } + ImageInfo srcImageInfo; + source.GetImageInfo(srcImageInfo); + ImageInfo dstImageInfo = { + .size = opts.size, + .pixelFormat = srcImageInfo.pixelFormat, + .alphaType = srcImageInfo.alphaType, + }; + if (dstPixelMap->SetImageInfo(dstImageInfo) != SUCCESS) { + return nullptr; + } + if (!AllocMemory(source, *dstPixelMap.get())) { + return nullptr; + } + return dstPixelMap; +} + +sptr DetailEnhanceNapi::GetSurfaceBufferFromDMAPixelMap( + const std::shared_ptr& pixelmap) +{ + if (pixelmap == nullptr || pixelmap->GetAllocatorType() != AllocatorType::DMA_ALLOC) { + return nullptr; + } + return reinterpret_cast(pixelmap->GetFd()); +} + +napi_value DetailEnhanceNapi::DetailEnhanceImpl(napi_env env, std::unique_ptr& context) +{ + if (context == nullptr) { + IMAGE_LOGE("context == nullptr"); + return nullptr; + } + if (mDetailEnh == nullptr) { + IMAGE_LOGE("mDetailEnh == nullptr"); + return nullptr; + } + ATRACE_BEGIN("DetailEnhanceNapi::CreatePixelMap"); + 
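+ // Build a destination pixel map at the requested output size, then run detail enhancement on the DMA-backed surface buffers of the input and output pixel maps.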
InitializationOptions opts { + .size = { + .width = static_cast(context->xArg), + .height = static_cast(context->yArg), + }, + }; + std::unique_ptr outputPtr = CreateDstPixelMap(*context->inputPixelMap, opts); + if (outputPtr == nullptr) { + IMAGE_LOGE("create failed"); + return nullptr; + } + std::shared_ptr dstPixelMap{std::move(outputPtr)}; + if (dstPixelMap == nullptr) { + IMAGE_LOGE("move failed"); + return nullptr; + } + auto input = GetSurfaceBufferFromDMAPixelMap(context->inputPixelMap); + auto output = GetSurfaceBufferFromDMAPixelMap(dstPixelMap); + if (mDetailEnh->Process(input, output) != VPE_ALGO_ERR_OK) { + IMAGE_LOGE("process failed"); + return nullptr; + } + return PixelMapNapi::CreatePixelMap(env, dstPixelMap); +} + +napi_value DetailEnhanceNapi::Process(napi_env env, napi_callback_info info) +{ + ImageTrace imageTrace("DetailEnhanceNapi::DetailEnhance"); + ATRACE_BEGIN("DetailEnhanceNapi::ProcessCheckEnv"); + std::lock_guard lock(lock_); + if (mDetailEnh == nullptr) { + napi_value result; + napi_get_boolean(env, false, &result); + return result; + } + ATRACE_END(); + ATRACE_BEGIN("DetailEnhanceNapi::prepareNapi"); + NapiValues nVal; + nVal.argc = NUM_3; + napi_value argValue[NUM_3] = {0}; + nVal.argv = argValue; + if (!PrepareNapiEnv(env, info, &nVal)) { + return nVal.result; + } + if (nVal.argc != NUM_3) { + IMAGE_LOGE("Invalid args count %{public}zu", nVal.argc); + return nullptr; + } else { + if (napi_ok != napi_get_value_double(env, nVal.argv[NUM_0], &(nVal.context->xArg))) { + IMAGE_LOGE("Arg 0 type mismatch"); + return nullptr; + } + if (napi_ok != napi_get_value_double(env, nVal.argv[NUM_1], &(nVal.context->yArg))) { + IMAGE_LOGE("Arg 1 type mismatch"); + return nullptr; + } + if (ParserImageType(env, argValue[NUM_2]) == ImageType::TYPE_PIXEL_MAP) { + nVal.context->inputPixelMap = PixelMapNapi::GetPixelMap(env, argValue[NUM_2]); + } + if (nVal.context->inputPixelMap == nullptr) { + return nullptr; + } + } + if (nVal.context->callbackRef == nullptr) { + napi_create_promise(env, &(nVal.context->deferred), &(nVal.result)); + } + ATRACE_END(); + std::chrono::steady_clock::time_point clProcess = std::chrono::steady_clock::now(); + napi_value outPixelmap = DetailEnhanceImpl(env, nVal.context); + std::chrono::duration clProcessD = std::chrono::steady_clock::now() - clProcess; + IMAGE_LOGI("detail enhance total cost: %{public}f ms", clProcessD.count()); + return outPixelmap; +} + +static napi_value Init(napi_env env, napi_value exports) +{ + static napi_property_descriptor desc[] = { + DECLARE_NAPI_FUNCTION("init", DetailEnhanceNapi::Init), + DECLARE_NAPI_FUNCTION("process", DetailEnhanceNapi::Process), + DECLARE_NAPI_FUNCTION("destroy", DetailEnhanceNapi::Destroy), + }; + + NAPI_CALL(env, napi_define_properties(env, exports, sizeof(desc) / sizeof(desc[0]), desc)); + return exports; +} + +static napi_module detailEnhanceModule = { + .nm_version = 1, + .nm_flags = 0, + .nm_filename = nullptr, + .nm_register_func = Init, + .nm_modname = "multimedia.detailEnhancer", + .nm_priv = ((void *)0), + .reserved = {0}, +}; + +extern "C" __attribute__((constructor)) void DetailEnhanceRegisterModule(void) +{ + napi_module_register(&detailEnhanceModule); +} +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/framework/capi/image_processing/detail_enhance_napi_formal.cpp b/framework/capi/image_processing/detail_enhance_napi_formal.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7dc334fe23b38beaaab9b492ed2ed15f39d04c0a --- /dev/null 
+++ b/framework/capi/image_processing/detail_enhance_napi_formal.cpp @@ -0,0 +1,848 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#undef LOG_DOMAIN +#define LOG_DOMAIN 0xD002B3F +#undef LOG_TAG +#define LOG_TAG "VpeNapi" + +#include "detail_enhance_napi_formal.h" + +#include + +#include "image_processing_types.h" +#include "image_napi_utils.h" +#include "media_errors.h" +#include "memory_manager.h" +#include "native_avformat.h" +#include "pixelmap_native_impl.h" +#include "pixelmap_native.h" + +#include "detail_enhancer_common.h" +#include "vpe_log.h" +#include "vpe_trace.h" +#include "vpe_utils.h" + +namespace { +constexpr uint32_t NUM_0 = 0; +constexpr uint32_t NUM_1 = 1; +constexpr uint32_t NUM_2 = 2; +constexpr uint32_t NUM_3 = 3; +constexpr uint32_t NUM_4 = 4; +constexpr uint32_t NUM_5 = 5; +constexpr uint32_t NUM_6 = 6; +constexpr int32_t NEW_INSTANCE_ARGC = 1; +const std::string CLASS_NAME = "ImageProcessor"; +static std::mutex g_imageProcessorMutex{std::mutex()}; +static std::mutex g_detailLock{std::mutex()}; +static std::mutex g_contrastLock{std::mutex()}; +} + +namespace OHOS { +namespace Media { +using namespace VideoProcessingEngine; +thread_local napi_ref VpeNapi::constructor_ = nullptr; +thread_local napi_ref VpeNapi::qualityLevelTypeRef_ = nullptr; +thread_local std::shared_ptr VpeNapi::detailContext_ = nullptr; +thread_local std::shared_ptr VpeNapi::contrastContext_ = nullptr; +static std::shared_ptr g_detailEnh{}; +static std::shared_ptr g_contrastEnh{}; + +struct QualityLevelEnum { + std::string name; + int32_t numVal; + std::string strVal; +}; +static std::vector g_qualityLevels = { + {"NONE", OHOS::Media::VideoProcessingEngine::DETAIL_ENH_LEVEL_NONE, ""}, + {"LOW", OHOS::Media::VideoProcessingEngine::DETAIL_ENH_LEVEL_LOW, ""}, + {"MEDIUM", OHOS::Media::VideoProcessingEngine::DETAIL_ENH_LEVEL_MEDIUM, ""}, + {"HIGH", OHOS::Media::VideoProcessingEngine::DETAIL_ENH_LEVEL_HIGH, ""}, +}; + +void VpeNapi::ThrowExceptionError(napi_env env, const int32_t errCode, const std::string errMsg) +{ + std::string errCodeStr = std::to_string(errCode); + napi_throw_error(env, errCodeStr.c_str(), errMsg.c_str()); +} + +bool VpeNapi::PrepareNapiEnv(napi_env env, napi_callback_info info, NapiValues* nVal) +{ + CHECK_AND_RETURN_RET_LOG(nVal != nullptr, false, "nVal == nullptr"); + if (napi_get_undefined(env, &(nVal->result)) != napi_ok) { + VPE_LOGE("Get undefined result failed"); + return false; + } + nVal->status = napi_get_cb_info(env, info, &(nVal->argc), nVal->argv, &(nVal->thisVar), nullptr); + if (nVal->status != napi_ok) { + VPE_LOGE("fail to napi_get_cb_info"); + return false; + } + return true; +} + +ImageType VpeNapi::ParserImageType(napi_env env, napi_value argv) +{ + napi_value constructor = nullptr; + napi_value global = nullptr; + napi_status ret = napi_invalid_arg; + napi_get_global(env, &global); + ret = napi_get_named_property(env, global, "PixelMap", &constructor); + if (ret != napi_ok) { + VPE_LOGI("Get 
VpeNapi property failed!"); + } + bool isInstance = false; + ret = napi_instanceof(env, argv, constructor, &isInstance); + if (ret == napi_ok && isInstance) { + return ImageType::TYPE_PIXEL_MAP; + } + return ImageType::TYPE_UNKNOWN; +} + +bool VpeNapi::ConfigResolutionBasedOnRatio(napi_env env, napi_value& nVal, + std::shared_ptr context) +{ + CHECK_AND_RETURN_RET_LOG(context != nullptr, false, "context == nullptr"); + double zoomRatio; + if (napi_ok != napi_get_value_double(env, nVal, &zoomRatio)) { + VPE_LOGE("Arg 1 type mismatch"); + return false; + } + CHECK_AND_RETURN_RET_LOG(context->inputPixelMap != nullptr, false, "context->inputPixelMap == nullptr"); + context->xArg = zoomRatio * context->inputPixelMap->GetWidth(); + context->yArg = zoomRatio * context->inputPixelMap->GetHeight(); + VPE_LOGE("config resolution with ratio :%{public}d, %{public}d", + static_cast(context->xArg), static_cast(context->yArg)); + return true; +} + +bool VpeNapi::ConfigResolution(napi_env env, napi_value& width, napi_value& height, + std::shared_ptr context) +{ + CHECK_AND_RETURN_RET_LOG(context != nullptr, false, "context == nullptr"); + CHECK_AND_RETURN_RET_LOG(napi_ok == napi_get_value_double(env, width, &(context->xArg)), + false, "Arg 1 type mismatch"); + CHECK_AND_RETURN_RET_LOG(napi_ok == napi_get_value_double(env, height, &(context->yArg)), + false, "Arg 2 type mismatch"); + return true; +} + +bool VpeNapi::ParseDetailEnhanceParameter(napi_env env, napi_callback_info info) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhanceParseParameter"); + std::lock_guard lock(g_detailLock); + CHECK_AND_RETURN_RET_LOG(detailContext_ != nullptr, false, "detailContext_ == nullptr"); + NapiValues nVal; + nVal.argc = NUM_4; // Use the maximum value to initialize argc before executing PrepareNapiEnv + napi_value argValue[NUM_4] = {0}; + nVal.argv = argValue; + CHECK_AND_RETURN_RET_LOG(PrepareNapiEnv(env, info, &nVal), false, "PrepareNapiEnv failed"); + if (nVal.argc != NUM_2 && nVal.argc != NUM_3 && nVal.argc != NUM_4) { + VPE_LOGE("Invalid args count %{public}zu", nVal.argc); + return false; + } + if (ParserImageType(env, argValue[NUM_0]) == ImageType::TYPE_PIXEL_MAP) { + detailContext_->inputPixelMap = PixelMapNapi::GetPixelMap(env, argValue[NUM_0]); + } else { + VPE_LOGE("args0 is not pixelMap!"); + return false; + } + CHECK_AND_RETURN_RET_LOG(detailContext_->inputPixelMap != nullptr, false, "inputPixelMap is nullptr!"); + if (nVal.argc == NUM_2) { // 2 parameter: pixelmap scaleRatio + CHECK_AND_RETURN_RET_LOG(ConfigResolutionBasedOnRatio(env, nVal.argv[NUM_1], detailContext_), + false, "ConfigResolutionBasedOnRatio failed"); + detailContext_->qualityLevel = DETAIL_ENH_LEVEL_LOW; // default as low level + } + if (nVal.argc == NUM_3) { // 3 parameter: pixelmap scaleRatio level / pixelmap x y + double valueToCheck = 0; + CHECK_AND_RETURN_RET_LOG(napi_get_value_double(env, nVal.argv[NUM_2], &valueToCheck) == napi_ok, + false, "failed to parse"); + if (valueToCheck >= 0 && valueToCheck <= 3) { // if valueToCheck in [0,3], valueToCheck should be level. 
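+ // In that case argument 1 is a scale ratio and the third value selects the quality level; otherwise arguments 1 and 2 are an explicit width/height and the level defaults to LOW.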
+ CHECK_AND_RETURN_RET_LOG(ConfigResolutionBasedOnRatio(env, nVal.argv[NUM_1], detailContext_), false, + "ConfigResolutionBasedOnRatio failed"); + detailContext_->qualityLevel = static_cast(valueToCheck); + } else { + CHECK_AND_RETURN_RET_LOG(ConfigResolution(env, nVal.argv[NUM_1], nVal.argv[NUM_2], detailContext_), + false, "ConfigResolution failed"); + detailContext_->qualityLevel = DETAIL_ENH_LEVEL_LOW; // default as low level + } + } + if (nVal.argc == NUM_4) { // 4 parameter: pixelmap x y level + CHECK_AND_RETURN_RET_LOG(ConfigResolution(env, nVal.argv[NUM_1], nVal.argv[NUM_2], detailContext_), + false, "ConfigResolution failed"); + CHECK_AND_RETURN_RET_LOG(napi_ok == napi_get_value_int32(env, nVal.argv[NUM_3], + &(detailContext_->qualityLevel)), false, "Arg 3 type mismatch"); + } + return true; +} + +napi_value VpeNapi::InitializeEnvironment(napi_env env, napi_callback_info info) +{ + napi_value result; + napi_get_boolean(env, true, &result); + return result; +} + +napi_value VpeNapi::DeinitializeEnvironment(napi_env env, napi_callback_info info) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhanceDeinitializeEnvironment"); + std::lock_guard lock(g_detailLock); + napi_value result; + napi_get_boolean(env, true, &result); + return result; +} + +void VpeNapi::SetDstPixelMapInfo(PixelMap& source, void* dstPixels, uint32_t dstPixelsSize, + std::unique_ptr& memory, PixelMap& dstPixelMap) +{ + AllocatorType sourceType = source.GetAllocatorType(); + if (sourceType != AllocatorType::DMA_ALLOC) { + VPE_LOGW("only support DMA"); + return; + } + if (memory == nullptr) { + VPE_LOGW("Invalid memory"); + return; + } + dstPixelMap.SetPixelsAddr(dstPixels, memory->extend.data, memory->data.size, sourceType, nullptr); + if (source.GetAllocatorType() == AllocatorType::DMA_ALLOC && source.IsHdr()) { + sptr sourceSurfaceBuffer(reinterpret_cast (source.GetFd())); + sptr dstSurfaceBuffer(reinterpret_cast (dstPixelMap.GetFd())); + VpeUtils::CopySurfaceBufferInfo(sourceSurfaceBuffer, dstSurfaceBuffer); + } + OHOS::ColorManager::ColorSpace colorspace = source.InnerGetGrColorSpace(); + dstPixelMap.InnerSetColorSpace(colorspace); +} + +bool VpeNapi::AllocMemory(PixelMap& source, PixelMap& dstPixelMap, const InitializationOptions& opt) +{ + if (source.GetPixels() == nullptr) { + VPE_LOGE("pixels of source are not available"); + return false; + } + int32_t bufferSize = source.GetByteCount(); + if (bufferSize <= 0) { + VPE_LOGE("CopyPixelMap parameter bufferSize:[%{public}d] error.", bufferSize); + return false; + } + ImageInfo dstImageInfo; + dstPixelMap.GetImageInfo(dstImageInfo); + MemoryData memoryData = {nullptr, static_cast(bufferSize), "Copy ImageData", + dstImageInfo.size, dstImageInfo.pixelFormat}; + std::unique_ptr memory = MemoryManager::CreateMemory(source.GetAllocatorType(), memoryData); + if (memory == nullptr) { + VPE_LOGE("invalid memory"); + return false; + } + void* dstPixels = memory->data.data; + if (dstPixels == nullptr) { + VPE_LOGE("source crop allocate memory fail allocatetype: %{public}d ", source.GetAllocatorType()); + return false; + } + SetDstPixelMapInfo(source, dstPixels, static_cast(bufferSize), memory, dstPixelMap); + return true; +} + +std::unique_ptr VpeNapi::CreateDstPixelMap(PixelMap& source, const InitializationOptions& opts) +{ + if (source.GetAllocatorType() != AllocatorType::DMA_ALLOC) { + VPE_LOGE("alloc type of source is not dma, create with default method"); + return source.Create(source, opts); + } + std::unique_ptr dstPixelMap = std::make_unique(); + if (dstPixelMap == 
nullptr) { + VPE_LOGE("create pixelmap pointer fail"); + return nullptr; + } + ImageInfo srcImageInfo; + source.GetImageInfo(srcImageInfo); + ImageInfo dstImageInfo = { + .size = opts.size, + .pixelFormat = srcImageInfo.pixelFormat, + .alphaType = srcImageInfo.alphaType, + }; + if (dstPixelMap->SetImageInfo(dstImageInfo) != SUCCESS) { + return nullptr; + } + if (!AllocMemory(source, *dstPixelMap.get(), opts)) { + return nullptr; + } + return dstPixelMap; +} + +bool VpeNapi::ConvertPixelmapToSurfaceBuffer(const std::shared_ptr& pixelmap, + sptr& bufferImpl) +{ + BufferRequestConfig bfConfig = {}; + bfConfig.width = pixelmap->GetWidth(); + bfConfig.height = pixelmap->GetHeight(); + bfConfig.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA | BUFFER_USAGE_MEM_MMZ_CACHE; + bfConfig.strideAlignment = bfConfig.width; + bfConfig.format = GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888; + bfConfig.timeout = 0; + bfConfig.colorGamut = GraphicColorGamut::GRAPHIC_COLOR_GAMUT_SRGB; + bfConfig.transform = GraphicTransformType::GRAPHIC_ROTATE_NONE; + CHECK_AND_RETURN_RET_LOG((bufferImpl->Alloc(bfConfig) == GSERROR_OK), false, "invalid OH_PixelmapNative image"); + return true; +} + +sptr VpeNapi::GetSurfaceBufferFromDMAPixelMap( + const std::shared_ptr& pixelmap) +{ + CHECK_AND_RETURN_RET_LOG(pixelmap != nullptr, nullptr, "pixelmap == nullptr"); + if (pixelmap->GetAllocatorType() == AllocatorType::DMA_ALLOC) { + return reinterpret_cast(pixelmap->GetFd()); + } + auto buffer = SurfaceBuffer::Create(); + CHECK_AND_RETURN_RET_LOG(buffer != nullptr, nullptr, "get surface buffer failed!"); + CHECK_AND_RETURN_RET_LOG(ConvertPixelmapToSurfaceBuffer(pixelmap, buffer), nullptr, + "get surface buffer failed!"); + return buffer; +} + +napi_value VpeNapi::Create(napi_env env, napi_callback_info info) +{ + napi_value result = nullptr; + napi_get_undefined(env, &result); + napi_value constructor = nullptr; + napi_status status = napi_ok; + status = napi_get_reference_value(env, constructor_, &constructor); + if (status == napi_ok) { + size_t argc = NEW_INSTANCE_ARGC; + napi_value argv[NEW_INSTANCE_ARGC] = { 0 }; + status = napi_new_instance(env, constructor, argc, argv, &result); + } + if (status != napi_ok) { + VPE_LOGE("create instance failed"); + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_CREATE_FAILED, "create instance failed"); + return nullptr; + } + VPE_LOGE("create done"); + return result; +} + +std::shared_ptr VpeNapi::PrepareDstPixelMap(napi_env env, DetailEnhanceContext* context) +{ + CHECK_AND_RETURN_RET_LOG(context->inputPixelMap->GetWidth() != 0 && context->inputPixelMap->GetHeight() != 0, + nullptr, "invalid resolution"); + float ratio = std::min(static_cast(context->xArg) / static_cast(context->inputPixelMap->GetWidth()), + static_cast(context->yArg) / static_cast(context->inputPixelMap->GetHeight())); + InitializationOptions opts { + .size = { + .width = ratio < 1.0 ? static_cast(context->xArg) : + static_cast(context->inputPixelMap->GetWidth()), + .height = ratio < 1.0 ? 
static_cast(context->yArg) : + static_cast(context->inputPixelMap->GetHeight()), + }, + }; + std::unique_ptr outputPtr = CreateDstPixelMap(*context->inputPixelMap, opts); + if (outputPtr == nullptr) { + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_INVALID_VALUE, "create failed"); + return nullptr; + } + std::shared_ptr dstPixelMap{std::move(outputPtr)}; + return dstPixelMap; +} + +bool VpeNapi::InitDetailAlgo(napi_env env, int level) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhanceInitAlgo"); + CHECK_AND_RETURN_RET_LOG(g_detailEnh == nullptr, true, "DetailEnhancerImage handle has created"); + g_detailEnh = DetailEnhancerImage::Create(); + CHECK_AND_RETURN_RET_LOG(g_detailEnh != nullptr, false, "create DetailEnhancerImage failed"); + DetailEnhancerParameters param { + .uri = "", + .level = static_cast(level), + }; + if (g_detailEnh->SetParameter(param)!= VPE_ALGO_ERR_OK) { + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_CREATE_FAILED, "set parameter failed"); + return false; + } + return true; +} + +std::shared_ptr VpeNapi::DetailEnhanceImpl(napi_env env, DetailEnhanceContext* context) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhanceImpl"); + if (context == nullptr) { + VPE_LOGE("context == nullptr"); + return nullptr; + } + if (!InitDetailAlgo(env, context->qualityLevel)) { + VPE_LOGE("init algo failed"); + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_CREATE_FAILED, "init algo failed"); + return nullptr; + } + if (context->inputPixelMap == nullptr) { + VPE_LOGE("*context->inputPixelMap == nullptr"); + return nullptr; + } + auto dstPixelMap = PrepareDstPixelMap(env, context); + if (dstPixelMap == nullptr) { + VPE_LOGE("move failed"); + return nullptr; + } + auto output = GetSurfaceBufferFromDMAPixelMap(dstPixelMap); + auto input = GetSurfaceBufferFromDMAPixelMap(context->inputPixelMap); + CHECK_AND_RETURN_RET_LOG((g_detailEnh != nullptr && g_detailEnh->Process(input, output) == VPE_ALGO_ERR_OK), + nullptr, "process failed"); + VPE_LOGI("process done"); + return dstPixelMap; +} + +napi_value VpeNapi::EnhanceDetail(napi_env env, napi_callback_info info) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhanceProcess"); + if (detailContext_ == nullptr) { + detailContext_ = std::make_shared(); + } + CHECK_AND_RETURN_RET_LOG(detailContext_ != nullptr, nullptr, "context == nullptr"); + napi_deferred deferred; + napi_value promise; + NAPI_CALL(env, napi_create_promise(env, &deferred, &promise)); + CHECK_AND_RETURN_RET_LOG(ParseDetailEnhanceParameter(env, info), nullptr, "parse parameter failed"); + detailContext_->deferred = deferred; + napi_value resourceName; + napi_create_string_latin1(env, "Asynchronous processing", NAPI_AUTO_LENGTH, &resourceName); + napi_status status = napi_create_async_work(env, nullptr, resourceName, + [](napi_env env, void* data) { + DetailEnhanceContext* innerAsyncContext = reinterpret_cast(data); + if (innerAsyncContext == nullptr) { + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_PROCESS_FAILED, "innerAsyncContext is nullptr"); + return; + } + innerAsyncContext->outputPixelMap = DetailEnhanceImpl(env, innerAsyncContext); + }, + [](napi_env env, napi_status status, void* data) { + DetailEnhanceContext* innerAsyncContext = reinterpret_cast(data); + if (innerAsyncContext == nullptr) { + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_PROCESS_FAILED, "innerAsyncContext is nullptr"); + return; + } + napi_value outputPixelMapNapi = (innerAsyncContext->outputPixelMap == nullptr) ? 
+ nullptr : PixelMapNapi::CreatePixelMap(env, innerAsyncContext->outputPixelMap); + if (outputPixelMapNapi == nullptr) { + VPE_LOGI("outputPixelMap is nullptr"); + return; + } + if (innerAsyncContext->deferred) { + napi_resolve_deferred(env, innerAsyncContext->deferred, outputPixelMapNapi); + } else { + napi_value callback = nullptr; + napi_get_reference_value(env, innerAsyncContext->callbackRef, &callback); + napi_call_function(env, nullptr, callback, 1, &(outputPixelMapNapi), nullptr); + napi_delete_reference(env, innerAsyncContext->callbackRef); + innerAsyncContext->callbackRef = nullptr; + } + napi_delete_async_work(env, innerAsyncContext->asyncWork); + delete innerAsyncContext; + }, reinterpret_cast<void*>(detailContext_.get()), &detailContext_->asyncWork); + CHECK_AND_RETURN_RET_LOG(status == napi_ok, nullptr, "create async work failed"); + napi_queue_async_work(env, detailContext_->asyncWork); + return promise; +} + +napi_value VpeNapi::EnhanceDetailSync(napi_env env, napi_callback_info info) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhanceProcess"); + if (detailContext_ == nullptr) { + detailContext_ = std::make_shared<DetailEnhanceContext>(); + } + CHECK_AND_RETURN_RET_LOG(detailContext_ != nullptr, nullptr, "context == nullptr"); + CHECK_AND_RETURN_RET_LOG(ParseDetailEnhanceParameter(env, info), nullptr, "parse parameter failed"); + std::shared_ptr<PixelMap> outputPixelMap = DetailEnhanceImpl(env, detailContext_.get()); + if (outputPixelMap == nullptr) { + VPE_LOGE("DetailEnhanceImpl processing failed"); + return nullptr; + } + napi_value outputPixelMapNapi = PixelMapNapi::CreatePixelMap(env, outputPixelMap); + return outputPixelMapNapi; +} + +bool VpeNapi::UpdateMetadataBasedOnLcd(ContrastEnhanceContext* context) +{ + sptr<SurfaceBuffer> surfaceBuffer = GetSurfaceBufferFromDMAPixelMap(context->inputPixelMap); + return g_contrastEnh->UpdateMetadataBasedOnLcd(context->displayArea, context->lcdWidth, context->lcdHeight, + surfaceBuffer); +} + +bool VpeNapi::UpdateMetadataBasedOnDetail(ContrastEnhanceContext* context) +{ + OHOS::Rect completePixelmapArea = { + .x = 0, + .y = 0, + .w = context->oriWidth, + .h = context->oriHeight, + }; + sptr<SurfaceBuffer> surfaceBuffer = GetSurfaceBufferFromDMAPixelMap(context->inputPixelMap); + return g_contrastEnh->UpdateMetadataBasedOnDetail(context->displayArea, context->curPixelmapArea, + completePixelmapArea, surfaceBuffer, context->fullRatio); +} + +napi_value VpeNapi::SetDetailImage(napi_env env, napi_callback_info info) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhanceProcess"); + if (contrastContext_ == nullptr) { + contrastContext_ = std::make_shared<ContrastEnhanceContext>(); + VPE_LOGI("create new contrast context"); + } + CHECK_AND_RETURN_RET_LOG(contrastContext_ != nullptr, nullptr, "context == nullptr"); + NapiValues nVal; + CHECK_AND_RETURN_RET_LOG(ParseDetailImageParameter(env, info, nVal), nullptr, "parse parameter failed"); + // Whether or not region decoding is used, display with the histogram-based result first, then switch to the bitmap-based result. + UpdateMetadataBasedOnLcd(contrastContext_.get()); + CallCallback(env, contrastContext_.get()); + // If the display position has already changed before the computation finishes, there is no need to continue. + CHECK_AND_RETURN_RET_LOG(!contrastContext_->genFinalEffect, nullptr, "It's still moving. 
Stop processing"); + if (contrastContext_->callbackRef == nullptr) { + napi_create_promise(env, &(contrastContext_->deferred), &(nVal.result)); + } + napi_value resourceName; + napi_create_string_latin1(env, "Asynchronous processing", NAPI_AUTO_LENGTH, &resourceName); + napi_status status = napi_create_async_work( + env, nullptr, resourceName, + [](napi_env env, void* data) { + ContrastEnhanceContext* innerAsyncContext = reinterpret_cast(data); + if (innerAsyncContext == nullptr) { + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_PROCESS_FAILED, "innerAsyncContext == nullptr"); + return; + } + UpdateMetadataBasedOnDetail(innerAsyncContext); + }, + [](napi_env env, napi_status status, void* data) { + ContrastEnhanceContext* innerAsyncContext = reinterpret_cast(data); + CallCallback(env, innerAsyncContext); + if (status != napi_ok) { + VPE_LOGE("process failed"); + } + VPE_LOGI("process detail image done"); + }, + (void*)(contrastContext_.get()), &contrastContext_->asyncWork); + CHECK_AND_RETURN_RET_LOG(status == napi_ok, nullptr, "create aysnc failed"); + status = napi_queue_async_work(env, contrastContext_->asyncWork); + CHECK_AND_RETURN_RET_LOG(status == napi_ok, nullptr, "queue aysnc work failed"); + return nullptr; +} + +bool VpeNapi::GenerateRegionHist(napi_env env, ContrastEnhanceContext* context) +{ + CHECK_AND_RETURN_RET_LOG(context != nullptr, false, "context == nullptr"); + CHECK_AND_RETURN_RET_LOG(context->lcdPixelMap != nullptr, false, "lcdPixelMap == nullptr"); + auto input = GetSurfaceBufferFromDMAPixelMap(context->lcdPixelMap); + CHECK_AND_RETURN_RET_LOG(input != nullptr, false, "input == nullptr"); + CHECK_AND_RETURN_RET_LOG(g_contrastEnh != nullptr, false, "g_contrastEnh == nullptr"); + if (g_contrastEnh->GetRegionHist(input) != 0) { // 9ms + VPE_LOGE("processed failed. 
Clear LUT history"); + return false; + } + return true; +} + +bool VpeNapi::InitContrastAlgo(napi_env env) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhanceInitAlgo"); + CHECK_AND_RETURN_RET_LOG(g_contrastEnh == nullptr, true, "ContrastEnhancerImage handle has created"); + g_contrastEnh = ContrastEnhancerImage::Create(); + CHECK_AND_RETURN_RET_LOG(g_contrastEnh != nullptr, false, "create ContrastEnhancerImage failed"); + ContrastEnhancerParameters param { + .uri = "", + }; + if (g_contrastEnh->SetParameter(param)!= VPE_ALGO_ERR_OK) { + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_CREATE_FAILED, "set parameter failed"); + return false; + } + return true; +} + +bool VpeNapi::ParseRect(napi_env env, napi_value nVal, OHOS::Rect& rect) +{ + CHECK_AND_RETURN_RET_LOG(GET_INT32_BY_NAME(nVal, "x", rect.x), false, "Failed to parse start pos X"); + CHECK_AND_RETURN_RET_LOG(GET_INT32_BY_NAME(nVal, "y", rect.y), false, "Failed to parse start pos Y"); + napi_value tmpValue = nullptr; + CHECK_AND_RETURN_RET_LOG(GET_NODE_BY_NAME(nVal, "size", tmpValue), false, "Failed to parse resolution of rect"); + CHECK_AND_RETURN_RET_LOG(GET_INT32_BY_NAME(tmpValue, "height", rect.h), false, "Failed to parse height of rect"); + CHECK_AND_RETURN_RET_LOG(GET_INT32_BY_NAME(tmpValue, "width", rect.w), false, "Failed to parse width of rect"); + return true; +} + +bool VpeNapi::ParseSize(napi_env env, napi_value nVal) +{ + napi_value tmpValue = nullptr; + CHECK_AND_RETURN_RET_LOG(GET_NODE_BY_NAME(nVal, "size", tmpValue), false, "Failed to parse resolution of rect"); + CHECK_AND_RETURN_RET_LOG(GET_INT32_BY_NAME(nVal, "height", contrastContext_->oriHeight), + false, "Failed to parse height of rect"); + CHECK_AND_RETURN_RET_LOG(GET_INT32_BY_NAME(nVal, "width", contrastContext_->oriWidth), + false, "Failed to parse width of rect"); + return true; +} + +bool VpeNapi::ParseDetailImageParameter(napi_env env, napi_callback_info info, NapiValues& nVal) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhance"); + std::lock_guard lock(g_contrastLock); + nVal.argc = NUM_6; + napi_value argValue[NUM_6] = {0}; + nVal.argv = argValue; + if (!PrepareNapiEnv(env, info, &nVal)) { + return false; + } + if (nVal.argc != NUM_6) { + VPE_LOGE("Invalid args count %{public}zu", nVal.argc); + return false; + } else { + CHECK_AND_RETURN_RET_LOG(ParserImageType(env, argValue[NUM_0]) == ImageType::TYPE_PIXEL_MAP, + false, "Arg 0 type mismatch"); + contrastContext_->inputPixelMap = PixelMapNapi::GetPixelMap(env, argValue[NUM_0]); + CHECK_AND_RETURN_RET_LOG(contrastContext_->inputPixelMap != nullptr, false, + "contrastContext_->srcPixelMap == nullptr, resuse history"); + CHECK_AND_RETURN_RET_LOG(napi_get_value_int32(env, nVal.argv[NUM_1], + &(contrastContext_->pixelmapId)) == napi_ok, false, "Arg 1 type mismatch"); + CHECK_AND_RETURN_RET_LOG(ParseRect(env, argValue[NUM_2], contrastContext_->curPixelmapArea), false, + "parse pixelmap area failed"); + CHECK_AND_RETURN_RET_LOG(ParseRect(env, argValue[NUM_3], contrastContext_->displayArea), false, + "parse display area failed"); + CHECK_AND_RETURN_RET_LOG(ParseSize(env, argValue[NUM_4]), false, "parse resolution of original image failed"); + CHECK_AND_RETURN_RET_LOG(napi_get_value_bool(env, nVal.argv[NUM_5], + &(contrastContext_->genFinalEffect)) == napi_ok, false, "Arg 5 type mismatch"); + } + contrastContext_->fullRatio = std::min( + static_cast(contrastContext_->inputPixelMap->GetWidth()) / + static_cast(contrastContext_->curPixelmapArea.w), + static_cast(contrastContext_->inputPixelMap->GetHeight()) / + 
static_cast<float>(contrastContext_->curPixelmapArea.h)); + return true; +} + +bool VpeNapi::ParseLCDParameter(napi_env env, napi_callback_info info, NapiValues& nVal) +{ + VPETrace vpeTrace("VpeNapi::DetailEnhance"); + std::lock_guard lock(g_contrastLock); + nVal.argc = NUM_3; + napi_value argValue[NUM_3] = {0}; + nVal.argv = argValue; + if (!PrepareNapiEnv(env, info, &nVal)) { + return false; + } + if (nVal.argc != NUM_3) { + VPE_LOGE("Invalid args count %{public}zu", nVal.argc); + return false; + } else { + CHECK_AND_RETURN_RET_LOG(ParserImageType(env, argValue[NUM_0]) == ImageType::TYPE_PIXEL_MAP, + false, "Arg 0 type is not pixelmap"); + contrastContext_->lcdPixelMap = PixelMapNapi::GetPixelMap(env, argValue[NUM_0]); + CHECK_AND_RETURN_RET_LOG(napi_get_value_int32(env, nVal.argv[NUM_1], + &(contrastContext_->contentId)) == napi_ok, false, "Failed to parse lcd param. Arg 1 type mismatch"); + CHECK_AND_RETURN_RET_LOG(napi_get_value_double(env, nVal.argv[NUM_2], + &(contrastContext_->defaultRatio)) == napi_ok, false, "Failed to parse lcd param. Arg 2 type mismatch"); + contrastContext_->lcdWidth = contrastContext_->lcdPixelMap->GetWidth(); + contrastContext_->lcdHeight = contrastContext_->lcdPixelMap->GetHeight(); + } + VPE_LOGI("update content info: lcdWidth:%{public}d, lcdHeight:%{public}d", + contrastContext_->lcdWidth, contrastContext_->lcdHeight); + return true; +} + +napi_value VpeNapi::SetLcdImage(napi_env env, napi_callback_info info) +{ + VPE_LOGI("set lcd image"); + VPETrace vpeTrace("VpeNapi::DetailEnhanceProcess"); + napi_value result = nullptr; + if (contrastContext_ == nullptr) { + contrastContext_ = std::make_shared<ContrastEnhanceContext>(); + VPE_LOGI("create new contrast context"); + } + if (contrastContext_ == nullptr) { + VPE_LOGE("context == nullptr"); + return result; + } + NapiValues nVal; + if (!ParseLCDParameter(env, info, nVal)) { + VPE_LOGE("parse parameter failed"); + return result; + } + napi_value resourceName; + napi_create_string_latin1(env, "Asynchronous processing", NAPI_AUTO_LENGTH, &resourceName); + napi_status status = napi_create_async_work( + env, nullptr, resourceName, + [](napi_env env, void* data) { + ContrastEnhanceContext* innerAsyncContext = reinterpret_cast<ContrastEnhanceContext*>(data); + if (innerAsyncContext == nullptr) { + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_PROCESS_FAILED, "innerAsyncContext == nullptr"); + return; + } + GenerateRegionHist(env, innerAsyncContext); + }, + [](napi_env env, napi_status status, void* data) { + ContrastEnhanceContext* innerAsyncContext = reinterpret_cast<ContrastEnhanceContext*>(data); + CallCallback(env, innerAsyncContext); + if (status != napi_ok) { + VPE_LOGE("Failed to generate lut baseline for picture %{public}d", contrastContext_->contentId); + } + VPE_LOGI("Generate lut baseline for picture %{public}d successfully. 
Initial scaling ratio: %{public}f", + contrastContext_->contentId, contrastContext_->defaultRatio); + }, + (void*)(contrastContext_.get()), &contrastContext_->asyncWork); + CHECK_AND_RETURN_RET_LOG(status == napi_ok, nullptr, "create aysnc failed"); + CHECK_AND_RETURN_RET_LOG(napi_queue_async_work(env, contrastContext_->asyncWork) == napi_ok, + nullptr, "queue aysnc work failed"); + return result; +} + +napi_value VpeNapi::RegisterCallback(napi_env env, napi_callback_info info) +{ + size_t argc = 1; + napi_value argv[1]; + napi_get_cb_info(env, info, &argc, argv, nullptr, nullptr); + if (contrastContext_->callbackRef != nullptr) { + napi_delete_reference(env, contrastContext_->callbackRef); + } + napi_create_reference(env, argv[0], 1, &(contrastContext_->callbackRef)); + return nullptr; +} + +napi_value VpeNapi::CallCallback(napi_env env, ContrastEnhanceContext* context) +{ + bool noCall = true; + if (noCall) { + return nullptr; + } + + if (context->callbackRef != nullptr) { + napi_value global; + napi_get_global(env, &global); + napi_value callbackFunc; + napi_get_reference_value(env, context->callbackRef, &callbackFunc); + napi_value result; + napi_call_function(env, global, callbackFunc, 0, nullptr, &result); + } + return nullptr; +} + +napi_value VpeNapi::Constructor(napi_env env, napi_callback_info info) +{ + napi_value undefineVar = nullptr; + napi_get_undefined(env, &undefineVar); + + napi_status status; + napi_value thisVar = nullptr; + napi_get_undefined(env, &thisVar); + size_t argc = NEW_INSTANCE_ARGC; + napi_value argv[NEW_INSTANCE_ARGC] = { 0 }; + status = napi_get_cb_info(env, info, &argc, argv, &thisVar, nullptr); + if (status != napi_ok || thisVar == nullptr) { + return undefineVar; + } + VpeNapi* pDetailEnhanceNapi = new VpeNapi(); + status = napi_wrap_with_size(env, thisVar, reinterpret_cast(pDetailEnhanceNapi), + VpeNapi::Destructor, nullptr, nullptr, static_cast(sizeof(VpeNapi))); + CHECK_AND_RETURN_RET_LOG(status == napi_ok, undefineVar, "Failure wrapping js to native napi"); + return thisVar; +} + +void VpeNapi::Destructor(napi_env env, void* nativeObject, void* finalize) +{ + if (nativeObject != nullptr) { + std::lock_guard lock(g_imageProcessorMutex); + delete reinterpret_cast(nativeObject); + nativeObject = nullptr; + } +} + +napi_value VpeNapi::DoInitAfter(napi_env env, napi_value exports, napi_value constructor, + size_t property_count, const napi_property_descriptor* properties) +{ + napi_value global = nullptr; + CHECK_AND_RETURN_RET_LOG(napi_get_global(env, &global) == napi_ok, nullptr, "Init:get global fail"); + CHECK_AND_RETURN_RET_LOG(napi_set_named_property(env, global, CLASS_NAME.c_str(), constructor) == napi_ok, + nullptr, "Init:set global named property fail"); + CHECK_AND_RETURN_RET_LOG(napi_set_named_property(env, exports, CLASS_NAME.c_str(), constructor) == napi_ok, + nullptr, "set named property fail"); + CHECK_AND_RETURN_RET_LOG(napi_define_properties(env, exports, property_count, properties) == napi_ok, + nullptr, "define properties fail"); + return exports; +} + +napi_value VpeNapi::CreateEnumTypeObject(napi_env env, napi_valuetype type, napi_ref* ref, + std::vector& imageEnumMap) +{ + napi_value result = nullptr; + napi_status status; + int32_t refCount = 1; + std::string propName; + status = napi_create_object(env, &result); + if (status == napi_ok) { + for (auto imgEnum : imageEnumMap) { + napi_value enumNapiValue = nullptr; + if (type == napi_string) { + status = napi_create_string_utf8(env, imgEnum.strVal.c_str(), NAPI_AUTO_LENGTH, 
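/*
 * Illustrative note: the element type of imageEnumMap is elided in this hunk;
 * judging by the fields used here (name, numVal, strVal), it is presumably a
 * small record along these lines (the struct name and layout are assumptions,
 * not taken from this diff):
 *
 *     struct ImageEnum {
 *         std::string name;     // property name exposed on the JS enum object
 *         int32_t numVal;       // value used when the enum type is napi_number
 *         std::string strVal;   // value used when the enum type is napi_string
 *     };
 *
 * CreateEnumTypeObject adds one named property per entry and then pins the
 * resulting object with napi_create_reference so it outlives this scope.
 */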
&enumNapiValue); + } else if (type == napi_number) { + status = napi_create_int32(env, imgEnum.numVal, &enumNapiValue); + } else { + VPE_LOGE("Unsupported type %{public}d!", type); + } + if (status == napi_ok && enumNapiValue != nullptr) { + status = napi_set_named_property(env, result, imgEnum.name.c_str(), enumNapiValue); + } + if (status != napi_ok) { + VPE_LOGE("Failed to add named prop!"); + break; + } + } + if (status == napi_ok) { + status = napi_create_reference(env, result, refCount, ref); + if (status == napi_ok) { + return result; + } + } + } + VPE_LOGE("CreateEnumTypeObject is Failed!"); + napi_get_undefined(env, &result); + return result; +} + +std::vector VpeNapi::RegisterNapi() +{ + std::vector props = { + DECLARE_NAPI_FUNCTION("enhanceDetail", VpeNapi::EnhanceDetail), + DECLARE_NAPI_FUNCTION("enhanceDetailSync", VpeNapi::EnhanceDetailSync), + DECLARE_NAPI_FUNCTION("setLcdImage", VpeNapi::SetLcdImage), + DECLARE_NAPI_FUNCTION("setDetailImage", VpeNapi::SetDetailImage), + DECLARE_NAPI_FUNCTION("registerCallback", VpeNapi::RegisterCallback) + }; + return props; +} + +napi_value VpeNapi::Init(napi_env env, napi_value exports) +{ + std::vector props = VpeNapi::RegisterNapi(); + napi_value constructor = nullptr; + CHECK_AND_RETURN_RET_LOG(napi_define_class(env, CLASS_NAME.c_str(), NAPI_AUTO_LENGTH, + VpeNapi::Constructor, nullptr, props.size(), props.data(), &constructor) == napi_ok, + nullptr, "define class fail"); + CHECK_AND_RETURN_RET_LOG(napi_create_reference(env, constructor, 1, &constructor_) == napi_ok, + nullptr, "create reference fail"); + + static napi_property_descriptor desc[] = { + DECLARE_NAPI_PROPERTY("QualityLevel", + CreateEnumTypeObject(env, napi_number, &qualityLevelTypeRef_, g_qualityLevels)), + DECLARE_NAPI_FUNCTION("create", VpeNapi::Create), + DECLARE_NAPI_FUNCTION("initializeEnvironment", VpeNapi::InitializeEnvironment), + DECLARE_NAPI_FUNCTION("deinitializeEnvironment", VpeNapi::DeinitializeEnvironment), + }; + auto result = DoInitAfter(env, exports, constructor, sizeof(desc) / sizeof(desc[0]), desc); + return result; +} +} +} \ No newline at end of file diff --git a/framework/capi/image_processing/detail_enhancer/detail_enhancer_image_native.cpp b/framework/capi/image_processing/detail_enhancer/detail_enhancer_image_native.cpp new file mode 100644 index 0000000000000000000000000000000000000000..87ea67c957b4198176b9f218cd0c53c942a358c3 --- /dev/null +++ b/framework/capi/image_processing/detail_enhancer/detail_enhancer_image_native.cpp @@ -0,0 +1,149 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "detail_enhancer_image_native.h" + +#include + +#include "detail_enhancer_common.h" +#include "detail_enhancer_image_fwk.h" +#include "image_processing_utils.h" +#include "surface_buffer.h" +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +namespace { +const std::unordered_map NDK_TO_INNER_LEVEL_MAP = { + { IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_NONE, DETAIL_ENH_LEVEL_NONE }, + { IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_LOW, DETAIL_ENH_LEVEL_LOW }, + { IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_MEDIUM, DETAIL_ENH_LEVEL_MEDIUM }, + { IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH, DETAIL_ENH_LEVEL_HIGH }, +}; +const std::unordered_map INNER_TO_NDK_LEVEL_MAP = { + { DETAIL_ENH_LEVEL_NONE, IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_NONE }, + { DETAIL_ENH_LEVEL_LOW, IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_LOW }, + { DETAIL_ENH_LEVEL_MEDIUM, IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_MEDIUM }, + { DETAIL_ENH_LEVEL_HIGH, IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH }, +}; +} + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +ImageProcessing_ErrorCode DetailEnhancerImageNative::InitializeInner() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + detailEnhancer_ = DetailEnhancerImage::Create(); + CHECK_AND_RETURN_RET_LOG(detailEnhancer_ != nullptr, IMAGE_PROCESSING_ERROR_CREATE_FAILED, + "Create detail enhancement failed!"); + isInitialized_ = true; + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode DetailEnhancerImageNative::DeinitializeInner() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + detailEnhancer_ = nullptr; + isInitialized_ = false; + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode DetailEnhancerImageNative::SetParameter(const OHOS::Media::Format& parameter) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED, + "Detail enhancer image is not initialized!"); + int level; + CHECK_AND_RETURN_RET_LOG(parameter.GetIntValue(IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, level), + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "No quality level!"); + int innerLevel = LevelTransfer(level, NDK_TO_INNER_LEVEL_MAP); + CHECK_AND_RETURN_RET_LOG(innerLevel != -1, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "Quality level is invalid!"); + DetailEnhancerParameters param{}; + param.level = static_cast(innerLevel); + return ImageProcessingUtils::InnerErrorToNDK(detailEnhancer_->SetParameter(param)); +} + +ImageProcessing_ErrorCode DetailEnhancerImageNative::GetParameter(OHOS::Media::Format& parameter) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED, + "Detail enhancer image is not initialized!"); + DetailEnhancerParameters param { + .uri = "", + .level{}, + }; + auto ret = detailEnhancer_->GetParameter(param); + CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ImageProcessingUtils::InnerErrorToNDK(ret), "param is not set"); + int level = LevelTransfer(param.level, INNER_TO_NDK_LEVEL_MAP); + CHECK_AND_RETURN_RET_LOG(level != -1, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "Quality level is invalid!"); + parameter.PutIntValue(IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, level); + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode DetailEnhancerImageNative::Process(const std::shared_ptr& sourceImage, + 
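/*
 * Illustrative sketch: how a caller could drive the quality-level mapping
 * above through SetParameter/GetParameter on an initialized
 * DetailEnhancerImageNative instance (here "enhancer", which is assumed, as is
 * the exact behavior of OHOS::Media::Format beyond what this file uses):
 *
 *     OHOS::Media::Format fmt;
 *     fmt.PutIntValue(IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL,
 *                     IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH);
 *     auto err = enhancer->SetParameter(fmt);   // NDK level -> inner level via NDK_TO_INNER_LEVEL_MAP
 *     if (err == IMAGE_PROCESSING_SUCCESS) {
 *         OHOS::Media::Format current;
 *         enhancer->GetParameter(current);      // inner level -> NDK level via INNER_TO_NDK_LEVEL_MAP
 *     }
 */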
std::shared_ptr& destinationImage) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED, + "Detail enhancer image is not initialized!"); + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "sourceImage or destinationImage is null!"); + auto sourceImageSurfaceBuffer = ImageProcessingUtils::GetSurfaceBufferFromPixelMap(sourceImage); + CHECK_AND_RETURN_RET_LOG(sourceImageSurfaceBuffer != nullptr, IMAGE_PROCESSING_ERROR_PROCESS_FAILED, + "sourceImageSurfaceBuffer create failed!"); + auto destinationImageSurfaceBuffer = ImageProcessingUtils::GetSurfaceBufferFromPixelMap(destinationImage); + CHECK_AND_RETURN_RET_LOG(destinationImageSurfaceBuffer != nullptr, IMAGE_PROCESSING_ERROR_PROCESS_FAILED, + "destinationImageSurfaceBuffer create failed!"); + auto ret = CheckParameter(); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "check parameter failed!"); + ret = ImageProcessingUtils::InnerErrorToNDK( + detailEnhancer_->Process(sourceImageSurfaceBuffer, destinationImageSurfaceBuffer, true)); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "process failed!"); + ret = ImageProcessingUtils::SetSurfaceBufferToPixelMap(destinationImageSurfaceBuffer, destinationImage); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "convert to pixelmap failed!"); + return ret; +} + +ImageProcessing_ErrorCode DetailEnhancerImageNative::CheckParameter() +{ + DetailEnhancerParameters param { + .uri = "", + .level{}, + }; + if (detailEnhancer_->GetParameter(param) == VPE_ALGO_ERR_OK) { + return IMAGE_PROCESSING_SUCCESS; + } + param.level = static_cast(IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + CHECK_AND_RETURN_RET_LOG(detailEnhancer_->SetParameter(param) == VPE_ALGO_ERR_OK, + IMAGE_PROCESSING_ERROR_PROCESS_FAILED, "set default enhance level failed!!"); + return IMAGE_PROCESSING_SUCCESS; +} + +int DetailEnhancerImageNative::LevelTransfer(int level, const std::unordered_map levelMap) const +{ + auto it = levelMap.find(level); + if (it == levelMap.end()) [[unlikely]] { + VPE_LOGE("Invalid input level:%{public}d", level); + return -1; + } + return it->second; +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/framework/capi/image_processing/detail_enhancer/include/detail_enhancer_image_native.h b/framework/capi/image_processing/detail_enhancer/include/detail_enhancer_image_native.h new file mode 100644 index 0000000000000000000000000000000000000000..663ef2b559c775488123c194e3756f5eb38eb355 --- /dev/null +++ b/framework/capi/image_processing/detail_enhancer/include/detail_enhancer_image_native.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef DETAIL_ENHANCER_IMAGE_NATIVE_H +#define DETAIL_ENHANCER_IMAGE_NATIVE_H + +#include +#include +#include + +#include "image_processing_native_template.h" +#include "image_processing_types.h" +#include "pixelmap_native_impl.h" + +#include "detail_enhancer_image.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +class DetailEnhancerImageNative : public ImageProcessingNativeTemplate { +public: + DEFINE_WITH_DISALLOW_COPY_AND_MOVE(DetailEnhancerImageNative); + + ImageProcessing_ErrorCode InitializeInner() override; + ImageProcessing_ErrorCode DeinitializeInner() override; + ImageProcessing_ErrorCode SetParameter(const OHOS::Media::Format& parameter) override; + ImageProcessing_ErrorCode GetParameter(OHOS::Media::Format& parameter) override; + ImageProcessing_ErrorCode Process(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) override; + +private: + ImageProcessing_ErrorCode CheckParameter(); + int LevelTransfer(int level, const std::unordered_map levelMap) const; + + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::shared_ptr detailEnhancer_{}; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // DETAIL_ENHANCER_IMAGE_NATIVE_H \ No newline at end of file diff --git a/framework/capi/image_processing/image_environment_native.cpp b/framework/capi/image_processing/image_environment_native.cpp new file mode 100644 index 0000000000000000000000000000000000000000..88e7b8eb3b10f0327b8b6db707a2e4ba96402272 --- /dev/null +++ b/framework/capi/image_processing/image_environment_native.cpp @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "image_environment_native.h" + +#include "image_processing_utils.h" +#include "video_processing_client.h" +#include "vpe_log.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +ImageEnvironmentNative& ImageEnvironmentNative::Get() +{ + static ImageEnvironmentNative instance{}; + return instance; +} + +ImageProcessing_ErrorCode ImageEnvironmentNative::Initialize() +{ + std::lock_guard lock(lock_); + if (isExplicitInit_) [[unlikely]] { + VPE_LOGE("Repeated initialization of the image environment!"); + return IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + isExplicitInit_ = true; + return InitializeLocked(); +} + +ImageProcessing_ErrorCode ImageEnvironmentNative::Deinitialize() +{ + std::lock_guard lock(lock_); + if (!isExplicitInit_) [[unlikely]] { + VPE_LOGE("Repeated deinitialization of the image environment!"); + return IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + isExplicitInit_ = false; + return DeinitializeLocked(); +} + +ImageProcessing_ErrorCode ImageEnvironmentNative::InitializeByDefault() +{ + std::lock_guard lock(lock_); + return InitializeLocked(); +} + +ImageProcessing_ErrorCode ImageEnvironmentNative::DeinitializeByDefault() +{ + std::lock_guard lock(lock_); + return DeinitializeLocked(); +} + +ImageProcessing_ErrorCode ImageEnvironmentNative::InitializeLocked() +{ + if (referenceCount_ > 0) [[likely]] { + VPE_LOGD("already init(cnt:%{public}d)", referenceCount_); + referenceCount_++; + return IMAGE_PROCESSING_SUCCESS; + } + VPE_LOGD("start to initialize..."); + ImageProcessing_ErrorCode result = InitializeEnvLocked(); + VPE_LOGD("initialize ret:%{public}s", ImageProcessingUtils::ToString(result).c_str()); + if (result == IMAGE_PROCESSING_SUCCESS) [[likely]] { + referenceCount_++; + } + return result; +} + +ImageProcessing_ErrorCode ImageEnvironmentNative::DeinitializeLocked() +{ + if (referenceCount_ > 1) [[likely]] { + VPE_LOGD("environment is still in use(cnt:%{public}d)", referenceCount_); + referenceCount_--; + return IMAGE_PROCESSING_SUCCESS; + } + VPE_LOGD("start to deinitialize..."); + ImageProcessing_ErrorCode result = DeinitializeEnvLocked(); + VPE_LOGD("deinitialize ret:%{public}s", ImageProcessingUtils::ToString(result).c_str()); + if (result == IMAGE_PROCESSING_SUCCESS) [[likely]] { + if (referenceCount_ <= 0) { + VPE_LOGE("referenceCount_ is less than 1, no need to deinitialize"); + return IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + referenceCount_--; + } + return result; +} + +ImageProcessing_ErrorCode ImageEnvironmentNative::InitializeEnvLocked() +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Connect(); + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageEnvironmentNative::DeinitializeEnvLocked() +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Disconnect(); + return IMAGE_PROCESSING_SUCCESS; +} diff --git a/framework/capi/image_processing/image_processing_capi_impl.cpp b/framework/capi/image_processing/image_processing_capi_impl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3f03cc3e2b4cd33820d790c9edb680912298f707 --- /dev/null +++ b/framework/capi/image_processing/image_processing_capi_impl.cpp @@ -0,0 +1,382 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "image_processing_capi_impl.h" + +#include "vpe_log.h" + +#include "image_environment_native.h" +#include "image_processing_impl.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +const int32_t IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION = 0x1; +const int32_t IMAGE_PROCESSING_TYPE_COMPOSITION = 0x2; +const int32_t IMAGE_PROCESSING_TYPE_DECOMPOSITION = 0x4; +const int32_t IMAGE_PROCESSING_TYPE_METADATA_GENERATION = 0x8; +const int32_t IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER = 0x10; +const char* IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL = "QualityLevel"; + +namespace { +ImageProcessing_ErrorCode CallImageProcessing(OH_ImageProcessing* imageProcessor, + std::function&)>&& operation) +{ + CHECK_AND_RETURN_RET_LOG(imageProcessor != nullptr, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, + "imageProcessor is null!"); + auto imageProcessing = imageProcessor->GetImageProcessing(); + CHECK_AND_RETURN_RET_LOG(imageProcessing != nullptr, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, + "imageProcessor is invalid!"); + return operation(imageProcessing); +} +} + +ImageProcessing_ErrorCode ImageProcessingCapiImpl::OpenCLInit() +{ + void *OpenclFoundationHandle = nullptr; + std::string path = "/sys_prod/lib64/VideoProcessingEngine/libaihdr_engine.so"; + auto ret = access(path.c_str(), F_OK); + if (ret != 0) { + VPE_LOGW("access = %d path = %s", ret, path.c_str()); + } else { + constexpr int DEVICE_NAME_LENGTH = 32; // 32 max name length + char deviceName[DEVICE_NAME_LENGTH]; + auto status = SetupOpencl(&OpenclFoundationHandle, "HUAWEI", deviceName); + CHECK_AND_RETURN_RET_LOG(status == static_cast(IMAGE_PROCESSING_SUCCESS), + IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "GetOpenCLContext SetupOpencl fail!"); + } + openclContext_ = reinterpret_cast(OpenclFoundationHandle); + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageProcessingCapiImpl::OpenGLInit() +{ + auto status = SetupOpengl(openglContext_); + CHECK_AND_RETURN_RET_LOG(status == static_cast(IMAGE_PROCESSING_SUCCESS), + IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "OpenGLInit SetupOpengl fail!"); + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageProcessingCapiImpl::InitializeEnvironment() +{ + CHECK_AND_RETURN_RET_LOG(OpenCLInit() == IMAGE_PROCESSING_SUCCESS, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "OpenCLInit failed!"); + CHECK_AND_RETURN_RET_LOG(OpenGLInit() == IMAGE_PROCESSING_SUCCESS, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "OpenGLInit failed!"); + + return ImageEnvironmentNative::Get().Initialize(); +} + +ImageProcessing_ErrorCode ImageProcessingCapiImpl::DeinitializeEnvironment() +{ + return ImageEnvironmentNative::Get().Deinitialize(); +} + +void ImageProcessingCapiImpl::LoadLibrary() +{ + std::lock_guard lock(lock_); + if (usedInstance_ == 0 && mLibHandle == nullptr) { + std::string path = "libvideoprocessingengine.z.so"; + mLibHandle = dlopen(path.c_str(), RTLD_NOW); + } + usedInstance_++; +} +void ImageProcessingCapiImpl::UnloadLibrary() +{ + std::lock_guard lock(lock_); + usedInstance_--; + if ((usedInstance_ == 0) && (mLibHandle 
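/*
 * Illustrative sketch: LoadLibrary/UnloadLibrary implement a reference count
 * around dlopen/dlclose so the algorithm library stays mapped only while a
 * support query is in flight. A minimal standalone version of the same idea
 * (names here are illustrative only, not part of this code):
 *
 *     #include <dlfcn.h>
 *     #include <mutex>
 *
 *     std::mutex g_libLock;
 *     void* g_handle = nullptr;
 *     int g_users = 0;
 *
 *     void AcquireLib(const char* path)
 *     {
 *         std::lock_guard<std::mutex> guard(g_libLock);
 *         if (g_users++ == 0 && g_handle == nullptr) {
 *             g_handle = dlopen(path, RTLD_NOW);   // load on first user
 *         }
 *     }
 *
 *     void ReleaseLib()
 *     {
 *         std::lock_guard<std::mutex> guard(g_libLock);
 *         if (--g_users == 0 && g_handle != nullptr) {
 *             dlclose(g_handle);                   // unload when the last user leaves
 *             g_handle = nullptr;
 *         }
 *     }
 */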
!= nullptr)) { + dlclose(mLibHandle); + mLibHandle = nullptr; + } +} + +ImageProcessing_ErrorCode ImageProcessingCapiImpl::LoadAlgo() +{ + CHECK_AND_RETURN_RET_LOG(mLibHandle != nullptr, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "Library is nullptr!"); + std::pair funcs[] = { + { "ImageProcessing_IsColorSpaceConversionSupported", isColorSpaceConversionSupported_}, + { "ImageProcessing_IsCompositionSupported", isCompositionSupported_ }, + { "ImageProcessing_IsDecompositionSupported", isDecompositionSupported_ }, + }; + for (auto& func : funcs) { + func.second = reinterpret_cast(dlsym(mLibHandle, func.first.c_str())); + CHECK_AND_RETURN_RET_LOG(func.second != nullptr, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "Failed to locate %s in - %s", func.first.c_str(), dlerror()); + } + isMetadataGenSupported_ = reinterpret_cast(dlsym(mLibHandle, + "ImageProcessing_IsMetadataGenerationSupported")); + CHECK_AND_RETURN_RET_LOG(isMetadataGenSupported_ != nullptr, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "Failed to locate %s in - %s", "ImageProcessing_IsMetadataGenerationSupported", + dlerror()); + return IMAGE_PROCESSING_SUCCESS; +} + +bool ImageProcessingCapiImpl::CheckColorSpaceConversionSupport( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo) +{ + CHECK_AND_RETURN_RET_LOG(sourceImageInfo != nullptr, false, "sourceImageInfo is nullptr!"); + CHECK_AND_RETURN_RET_LOG(destinationImageInfo != nullptr, false, "destinationImageInfo is nullptr!"); + auto status = LoadAlgo(); + CHECK_AND_RETURN_RET_LOG(status == IMAGE_PROCESSING_SUCCESS, false, "LoadAlgo faild"); + OHOS::Media::VideoProcessingEngine::ColorSpaceInfo inputInfo; + OHOS::Media::VideoProcessingEngine::ColorSpaceInfo outputInfo; + auto iterPixelFormat = IMAGE_FORMAT_MAP.find(static_cast(sourceImageInfo->pixelFormat)); + CHECK_AND_RETURN_RET_LOG(iterPixelFormat != IMAGE_FORMAT_MAP.end(), false, "src IMAGE_FORMAT_MAP find failed!"); + inputInfo.pixelFormat = iterPixelFormat->second; + iterPixelFormat = IMAGE_FORMAT_MAP.find(static_cast(destinationImageInfo->pixelFormat)); + CHECK_AND_RETURN_RET_LOG(iterPixelFormat != IMAGE_FORMAT_MAP.end(), false, "dst IMAGE_FORMAT_MAP find failed!"); + outputInfo.pixelFormat = iterPixelFormat->second; + auto iterMetadataType = HDR_METADATA_TYPE_MAP.find(static_cast( + sourceImageInfo->metadataType)); + CHECK_AND_RETURN_RET_LOG(iterMetadataType != HDR_METADATA_TYPE_MAP.end(), false, + "src HDR_METADATA_TYPE_MAP find failed!"); + inputInfo.metadataType = iterMetadataType->second; + iterMetadataType = HDR_METADATA_TYPE_MAP.find(static_cast( + destinationImageInfo->metadataType)); + CHECK_AND_RETURN_RET_LOG(iterMetadataType != HDR_METADATA_TYPE_MAP.end(), false, + "dst HDR_METADATA_TYPE_MAP find failed!"); + outputInfo.metadataType = iterMetadataType->second; + auto iterColorSpace = COLORSPACE_MAP.find(static_cast(sourceImageInfo->colorSpace)); + CHECK_AND_RETURN_RET_LOG(iterColorSpace != COLORSPACE_MAP.end(), false, "dst colorSpace find failed!"); + inputInfo.colorSpace = iterColorSpace->second; + iterColorSpace = COLORSPACE_MAP.find(static_cast(destinationImageInfo->colorSpace)); + CHECK_AND_RETURN_RET_LOG(iterColorSpace != COLORSPACE_MAP.end(), false, "dst colorSpace find failed!"); + outputInfo.colorSpace = iterColorSpace->second; + return isColorSpaceConversionSupported_(inputInfo, outputInfo); +} + +bool ImageProcessingCapiImpl::CheckCompositionSupport( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const 
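/*
 * Illustrative sketch: CheckColorSpaceConversionSupport, CheckCompositionSupport
 * and CheckDecompositionSupport all repeat the same NDK-to-inner translation of
 * pixel format, metadata type and color space. A hypothetical helper with the
 * same lookups (the helper name and return convention are assumptions):
 *
 *     static bool ToInnerColorSpaceInfo(const ImageProcessing_ColorSpaceInfo* info,
 *         OHOS::Media::VideoProcessingEngine::ColorSpaceInfo& out)
 *     {
 *         auto fmt = IMAGE_FORMAT_MAP.find(static_cast<OHOS::Media::PixelFormat>(info->pixelFormat));
 *         auto meta = HDR_METADATA_TYPE_MAP.find(static_cast<ImagePixelmapHdrMetadataType>(info->metadataType));
 *         auto cs = COLORSPACE_MAP.find(static_cast<ImagePixelmapColorspace>(info->colorSpace));
 *         if (fmt == IMAGE_FORMAT_MAP.end() || meta == HDR_METADATA_TYPE_MAP.end() || cs == COLORSPACE_MAP.end()) {
 *             return false;   // unsupported format, metadata type or color space
 *         }
 *         out.pixelFormat = fmt->second;
 *         out.metadataType = meta->second;
 *         out.colorSpace = cs->second;
 *         return true;
 *     }
 */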
ImageProcessing_ColorSpaceInfo* sourceGainmapInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo) +{ + CHECK_AND_RETURN_RET_LOG(sourceImageInfo != nullptr, false, "sourceImageInfo is nullptr!"); + CHECK_AND_RETURN_RET_LOG(destinationImageInfo != nullptr, false, "destinationImageInfo is nullptr!"); + CHECK_AND_RETURN_RET_LOG(sourceGainmapInfo != nullptr, false, "sourceGainmapInfo is nullptr!"); + auto status = LoadAlgo(); + CHECK_AND_RETURN_RET_LOG(status == IMAGE_PROCESSING_SUCCESS, false, "LoadAlgo faild"); + OHOS::Media::VideoProcessingEngine::ColorSpaceInfo inputInfo; + OHOS::Media::VideoProcessingEngine::ColorSpaceInfo outputInfo; + auto iterPixelFormat = IMAGE_FORMAT_MAP.find(static_cast(sourceImageInfo->pixelFormat)); + CHECK_AND_RETURN_RET_LOG(iterPixelFormat != IMAGE_FORMAT_MAP.end(), false, "src IMAGE_FORMAT_MAP find failed!"); + inputInfo.pixelFormat = iterPixelFormat->second; + iterPixelFormat = IMAGE_FORMAT_MAP.find(static_cast(destinationImageInfo->pixelFormat)); + CHECK_AND_RETURN_RET_LOG(iterPixelFormat != IMAGE_FORMAT_MAP.end(), false, "dst IMAGE_FORMAT_MAP find failed!"); + outputInfo.pixelFormat = iterPixelFormat->second; + auto iterMetadataType = HDR_METADATA_TYPE_MAP.find(static_cast( + sourceImageInfo->metadataType)); + CHECK_AND_RETURN_RET_LOG(iterMetadataType != HDR_METADATA_TYPE_MAP.end(), false, + "src HDR_METADATA_TYPE_MAP find failed!"); + inputInfo.metadataType = iterMetadataType->second; + iterMetadataType = HDR_METADATA_TYPE_MAP.find(static_cast( + destinationImageInfo->metadataType)); + CHECK_AND_RETURN_RET_LOG(iterMetadataType != HDR_METADATA_TYPE_MAP.end(), false, + "dst HDR_METADATA_TYPE_MAP find failed!"); + outputInfo.metadataType = iterMetadataType->second; + auto iterColorSpace = COLORSPACE_MAP.find(static_cast(sourceImageInfo->colorSpace)); + CHECK_AND_RETURN_RET_LOG(iterColorSpace != COLORSPACE_MAP.end(), false, "dst colorSpace find failed!"); + inputInfo.colorSpace = iterColorSpace->second; + iterColorSpace = COLORSPACE_MAP.find(static_cast(destinationImageInfo->colorSpace)); + CHECK_AND_RETURN_RET_LOG(iterColorSpace != COLORSPACE_MAP.end(), false, "dst colorSpace find failed!"); + outputInfo.colorSpace = iterColorSpace->second; + return isCompositionSupported_(inputInfo, outputInfo); +} + +bool ImageProcessingCapiImpl::CheckDecompositionSupport( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationGainmapInfo) +{ + CHECK_AND_RETURN_RET_LOG(sourceImageInfo != nullptr, false, "sourceImageInfo is nullptr!"); + CHECK_AND_RETURN_RET_LOG(destinationImageInfo != nullptr, false, "destinationImageInfo is nullptr!"); + CHECK_AND_RETURN_RET_LOG(destinationGainmapInfo != nullptr, false, "destinationGainmapInfo is nullptr!"); + auto status = LoadAlgo(); + CHECK_AND_RETURN_RET_LOG(status == IMAGE_PROCESSING_SUCCESS, false, "LoadAlgo faild"); + OHOS::Media::VideoProcessingEngine::ColorSpaceInfo inputInfo; + OHOS::Media::VideoProcessingEngine::ColorSpaceInfo outputInfo; + auto iterPixelFormat = IMAGE_FORMAT_MAP.find(static_cast(sourceImageInfo->pixelFormat)); + CHECK_AND_RETURN_RET_LOG(iterPixelFormat != IMAGE_FORMAT_MAP.end(), false, "src IMAGE_FORMAT_MAP find failed!"); + inputInfo.pixelFormat = iterPixelFormat->second; + iterPixelFormat = IMAGE_FORMAT_MAP.find(static_cast(destinationImageInfo->pixelFormat)); + CHECK_AND_RETURN_RET_LOG(iterPixelFormat != IMAGE_FORMAT_MAP.end(), false, "dst IMAGE_FORMAT_MAP find failed!"); + 
outputInfo.pixelFormat = iterPixelFormat->second; + auto iterMetadataType = HDR_METADATA_TYPE_MAP.find(static_cast( + sourceImageInfo->metadataType)); + CHECK_AND_RETURN_RET_LOG(iterMetadataType != HDR_METADATA_TYPE_MAP.end(), false, + "src HDR_METADATA_TYPE_MAP find failed!"); + inputInfo.metadataType = iterMetadataType->second; + iterMetadataType = HDR_METADATA_TYPE_MAP.find(static_cast( + destinationImageInfo->metadataType)); + CHECK_AND_RETURN_RET_LOG(iterMetadataType != HDR_METADATA_TYPE_MAP.end(), false, + "dst HDR_METADATA_TYPE_MAP find failed!"); + outputInfo.metadataType = iterMetadataType->second; + auto iterColorSpace = COLORSPACE_MAP.find(static_cast(sourceImageInfo->colorSpace)); + CHECK_AND_RETURN_RET_LOG(iterColorSpace != COLORSPACE_MAP.end(), false, "src colorSpace find failed!"); + inputInfo.colorSpace = iterColorSpace->second; + iterColorSpace = COLORSPACE_MAP.find(static_cast(destinationImageInfo->colorSpace)); + CHECK_AND_RETURN_RET_LOG(iterColorSpace != COLORSPACE_MAP.end(), false, "dst colorSpace find failed!"); + outputInfo.colorSpace = iterColorSpace->second; + return isDecompositionSupported_(inputInfo, outputInfo); +} + +bool ImageProcessingCapiImpl::CheckMetadataGenerationSupport(const ImageProcessing_ColorSpaceInfo* sourceImageInfo) +{ + CHECK_AND_RETURN_RET_LOG(sourceImageInfo != nullptr, false, "sourceImageInfo is nullptr!"); + auto status = LoadAlgo(); + CHECK_AND_RETURN_RET_LOG(status == IMAGE_PROCESSING_SUCCESS, false, "LoadAlgo faild"); + OHOS::Media::VideoProcessingEngine::ColorSpaceInfo inputInfo; + auto iterPixelFormat = IMAGE_FORMAT_MAP.find(static_cast(sourceImageInfo->pixelFormat)); + CHECK_AND_RETURN_RET_LOG(iterPixelFormat != IMAGE_FORMAT_MAP.end(), false, "src IMAGE_FORMAT_MAP find failed!"); + inputInfo.pixelFormat = iterPixelFormat->second; + + auto iterMetadataType = HDR_METADATA_TYPE_MAP.find(static_cast( + sourceImageInfo->metadataType)); + CHECK_AND_RETURN_RET_LOG(iterMetadataType != HDR_METADATA_TYPE_MAP.end(), false, + "src HDR_METADATA_TYPE_MAP find failed!"); + inputInfo.metadataType = iterMetadataType->second; + + auto iterColorSpace = COLORSPACE_MAP.find(static_cast(sourceImageInfo->colorSpace)); + CHECK_AND_RETURN_RET_LOG(iterColorSpace != COLORSPACE_MAP.end(), false, "dst colorSpace find failed!"); + inputInfo.colorSpace = iterColorSpace->second; + return isMetadataGenSupported_(inputInfo); +} + +bool ImageProcessingCapiImpl::IsColorSpaceConversionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo) +{ + LoadLibrary(); + auto flag = CheckColorSpaceConversionSupport(sourceImageInfo, destinationImageInfo); + UnloadLibrary(); + return flag; +} + +bool ImageProcessingCapiImpl::IsCompositionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* sourceGainmapInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo) +{ + LoadLibrary(); + auto flag = CheckCompositionSupport(sourceImageInfo, sourceGainmapInfo, destinationImageInfo); + UnloadLibrary(); + return flag; +} + +bool ImageProcessingCapiImpl::IsDecompositionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationGainmapInfo) +{ + LoadLibrary(); + auto flag = CheckDecompositionSupport(sourceImageInfo, destinationImageInfo, destinationGainmapInfo); + UnloadLibrary(); + return flag; +} + +bool 
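/*
 * Illustrative sketch: the Is*Supported entry points wrap their Check*
 * counterparts in a LoadLibrary()/UnloadLibrary() pair so the algorithm
 * library is only mapped for the duration of the query. A caller-side view,
 * assuming ImageProcessing_ColorSpaceInfo structures filled by the app with
 * the pixelFormat/metadataType/colorSpace fields used above:
 *
 *     ImageProcessing_ColorSpaceInfo src {};
 *     ImageProcessing_ColorSpaceInfo dst {};
 *     ImageProcessingCapiImpl impl;
 *     if (impl.IsColorSpaceConversionSupported(&src, &dst)) {
 *         // safe to create an instance and call ConvertColorSpace()
 *     }
 */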
ImageProcessingCapiImpl::IsMetadataGenerationSupported(const ImageProcessing_ColorSpaceInfo* sourceImageInfo)
+{
+    LoadLibrary();
+    auto flag = CheckMetadataGenerationSupport(sourceImageInfo);
+    UnloadLibrary();
+    return flag;
+}
+
+ImageProcessing_ErrorCode ImageProcessingCapiImpl::Create(OH_ImageProcessing** imageProcessor, int type)
+{
+    return OH_ImageProcessing::Create(imageProcessor, type, openglContext_, openclContext_);
+}
+
+ImageProcessing_ErrorCode ImageProcessingCapiImpl::Destroy(OH_ImageProcessing* imageProcessor)
+{
+    return OH_ImageProcessing::Destroy(imageProcessor);
+}
+
+ImageProcessing_ErrorCode ImageProcessingCapiImpl::SetParameter(OH_ImageProcessing* imageProcessor,
+    const OH_AVFormat* parameter)
+{
+    return CallImageProcessing(imageProcessor, [&parameter](std::shared_ptr& obj) {
+        return obj->SetParameter(parameter);
+    });
+}
+
+ImageProcessing_ErrorCode ImageProcessingCapiImpl::GetParameter(OH_ImageProcessing* imageProcessor,
+    OH_AVFormat* parameter)
+{
+    return CallImageProcessing(imageProcessor, [&parameter](std::shared_ptr& obj) {
+        return obj->GetParameter(parameter);
+    });
+}
+
+ImageProcessing_ErrorCode ImageProcessingCapiImpl::ConvertColorSpace(OH_ImageProcessing* imageProcessor,
+    OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage)
+{
+    return CallImageProcessing(imageProcessor, [&sourceImage, &destinationImage](
+        std::shared_ptr& obj) {
+        return obj->ConvertColorSpace(sourceImage, destinationImage);
+    });
+}
+
+ImageProcessing_ErrorCode ImageProcessingCapiImpl::Compose(OH_ImageProcessing* imageProcessor,
+    OH_PixelmapNative* sourceImage, OH_PixelmapNative* sourceGainmap, OH_PixelmapNative* destinationImage)
+{
+    return CallImageProcessing(imageProcessor, [&sourceImage, &sourceGainmap, &destinationImage](
+        std::shared_ptr& obj) {
+        return obj->Compose(sourceImage, sourceGainmap, destinationImage);
+    });
+}
+
+ImageProcessing_ErrorCode ImageProcessingCapiImpl::Decompose(OH_ImageProcessing* imageProcessor,
+    OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage,
+    OH_PixelmapNative* destinationGainmap)
+{
+    return CallImageProcessing(imageProcessor, [&sourceImage, &destinationImage, &destinationGainmap](
+        std::shared_ptr& obj) {
+        return obj->Decompose(sourceImage, destinationImage, destinationGainmap);
+    });
+}
+
+ImageProcessing_ErrorCode ImageProcessingCapiImpl::GenerateMetadata(OH_ImageProcessing* imageProcessor,
+    OH_PixelmapNative* sourceImage)
+{
+    return CallImageProcessing(imageProcessor, [&sourceImage](
+        std::shared_ptr& obj) {
+        return obj->GenerateMetadata(sourceImage);
+    });
+}
+
+ImageProcessing_ErrorCode ImageProcessingCapiImpl::EnhanceDetail(OH_ImageProcessing* imageProcessor,
+    OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage)
+{
+    return CallImageProcessing(imageProcessor, [&sourceImage, &destinationImage](
+        std::shared_ptr& obj) {
+        return obj->Process(sourceImage, destinationImage);
+    });
+}
+
+IImageProcessingNdk* CreateImageProcessingNdk()
+{
+    return new(std::nothrow) ImageProcessingCapiImpl();
+}
+
+void DestroyImageProcessingNdk(IImageProcessingNdk* obj)
+{
+    CHECK_AND_RETURN_LOG(obj != nullptr, "VPE image processing is null!");
+    ImageProcessingCapiImpl* impl = static_cast<ImageProcessingCapiImpl*>(obj);
+    delete impl;
+}
diff --git a/framework/capi/image_processing/image_processing_factory.cpp b/framework/capi/image_processing/image_processing_factory.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..455e158558e0a658756e8e73f028aa4b2326f5a3
--- /dev/null
+++ 
b/framework/capi/image_processing/image_processing_factory.cpp @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "image_processing_factory.h" + +#include +#include + +#include "vpe_log.h" +#include "image_processing_native_template.h" +#include "metadata_generator_image_native.h" +#include "detail_enhancer_image_native.h" +#include "colorspace_converter_image_native.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +namespace { +template +std::shared_ptr Create() +{ + return ImageProcessingNativeTemplate::Create(); +} +// NOTE: Add VPE feature type like below. +// VPE feature map begin +const std::unordered_map()>> CREATORS = { + { IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER, Create }, + { IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, Create }, + { IMAGE_PROCESSING_TYPE_COMPOSITION, Create }, + { IMAGE_PROCESSING_TYPE_DECOMPOSITION, Create }, + { IMAGE_PROCESSING_TYPE_METADATA_GENERATION, Create }, + // ... +}; +// VPE feature map end +} + +bool ImageProcessingFactory::IsValid(int type) +{ + return CREATORS.find(type) != CREATORS.end(); +} + +std::shared_ptr ImageProcessingFactory::CreateImageProcessing(int type) +{ + auto it = CREATORS.find(type); + if (it == CREATORS.end()) { + VPE_LOGE("Unknown type:%{public}d!", type); + return nullptr; + } + return it->second(); +} diff --git a/framework/capi/image_processing/image_processing_impl.cpp b/framework/capi/image_processing/image_processing_impl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f0109d5f1847142371eb5bcf8e4f2e4f2354b887 --- /dev/null +++ b/framework/capi/image_processing/image_processing_impl.cpp @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "image_processing_impl.h" + +#include "image_processing_factory.h" +#include "vpe_log.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +ImageProcessing_ErrorCode OH_ImageProcessing::Create(OH_ImageProcessing** instance, int type, + std::shared_ptr openglContext, + ClContext *openclContext) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr && *instance == nullptr, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, + "VPE image processing instance is null or *instance is not null!"); + CHECK_AND_RETURN_RET_LOG(ImageProcessingFactory::IsValid(type), IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, + "VPE image processing type(%{public}d) is invalid!", type); + + *instance = new(std::nothrow) OH_ImageProcessing(type); + CHECK_AND_RETURN_RET_LOG(*instance != nullptr, IMAGE_PROCESSING_ERROR_NO_MEMORY, + "VPE image processing out of memory!"); + auto obj = (*instance)->GetImageProcessing(); + CHECK_AND_RETURN_RET_LOG(obj != nullptr, IMAGE_PROCESSING_ERROR_CREATE_FAILED, + "VPE image processing constructor failed!"); + obj->opengclContext_ = openclContext; + obj->openglContext_ = openglContext; + return obj->Initialize(); +} + +ImageProcessing_ErrorCode OH_ImageProcessing::Destroy(OH_ImageProcessing* instance) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, + "VPE image processing instance is null!"); + auto obj = instance->GetImageProcessing(); + CHECK_AND_RETURN_RET_LOG(obj != nullptr, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, + "VPE image processing instance is empty!"); + auto errorCode = obj->Deinitialize(); + delete instance; + instance = nullptr; + return errorCode; +} + +OH_ImageProcessing::OH_ImageProcessing(int type) +{ + imageProcessing_ = ImageProcessingFactory::CreateImageProcessing(type); +} + +OH_ImageProcessing::~OH_ImageProcessing() +{ + imageProcessing_ = nullptr; +} + +std::shared_ptr OH_ImageProcessing::GetImageProcessing() +{ + return imageProcessing_; +} diff --git a/framework/capi/image_processing/image_processing_native_base.cpp b/framework/capi/image_processing/image_processing_native_base.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e099559ffae3b640160cd3461da570e71cb0efd2 --- /dev/null +++ b/framework/capi/image_processing/image_processing_native_base.cpp @@ -0,0 +1,182 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "image_processing_native_base.h" + +#include "common/native_mfmagic.h" +#include "pixelmap_native_impl.h" + +#include "image_environment_native.h" +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +ImageProcessingNativeBase::ImageProcessingNativeBase() +{ +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::Initialize() +{ + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + auto result = InitializeInner(); + isInitialized_ = true; + return result; +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::Deinitialize() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + auto result = DeinitializeInner(); + isInitialized_ = false; + return result; +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::SetParameter(const OH_AVFormat* parameter) +{ + CHECK_AND_RETURN_RET_LOG(parameter != nullptr, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + return SetParameter(parameter->format_); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::GetParameter(OH_AVFormat* parameter) +{ + CHECK_AND_RETURN_RET_LOG(parameter != nullptr, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + return GetParameter(parameter->format_); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::Process(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage) +{ + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + auto destPixelmap = destinationImage->GetInnerPixelmap(); + auto sourePixelmap = sourceImage->GetInnerPixelmap(); + return Process(sourePixelmap, destPixelmap); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::ConvertColorSpace(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage) +{ + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + auto destPixelmap = destinationImage->GetInnerPixelmap(); + return ConvertColorSpace(sourceImage->GetInnerPixelmap(), destPixelmap); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::Compose(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* sourceGainmap, OH_PixelmapNative* destinationImage) +{ + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr && sourceGainmap != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + auto destPixelmap = destinationImage->GetInnerPixelmap(); + return Compose(sourceImage->GetInnerPixelmap(), sourceGainmap->GetInnerPixelmap(), destPixelmap); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::Decompose(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage, OH_PixelmapNative* destinationGainmap) +{ + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr && destinationGainmap != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + auto destPixelmap = destinationImage->GetInnerPixelmap(); + auto destGainmapPixelmap = 
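/*
 * Illustrative note: each public OH_PixelmapNative* entry point in this file
 * follows the same shape - validate the arguments, unwrap to the inner
 * OHOS::Media::PixelMap via GetInnerPixelmap(), then forward to the
 * overridable shared_ptr overload defined further down. Schematically
 * (the operation name here is hypothetical):
 *
 *     ImageProcessing_ErrorCode ImageProcessingNativeBase::SomeOperation(
 *         OH_PixelmapNative* src, OH_PixelmapNative* dst)
 *     {
 *         CHECK_AND_RETURN_RET_LOG(src != nullptr && dst != nullptr,
 *             IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!");
 *         auto dstPixelmap = dst->GetInnerPixelmap();
 *         return SomeOperation(src->GetInnerPixelmap(), dstPixelmap);
 *     }
 */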
destinationGainmap->GetInnerPixelmap(); + return Decompose(sourceImage->GetInnerPixelmap(), destPixelmap, destGainmapPixelmap); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::GenerateMetadata(OH_PixelmapNative* sourceImage) +{ + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + return GenerateMetadata(sourceImage->GetInnerPixelmap()); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::EnhanceDetail(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage) +{ + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr && destinationImage != nullptr, + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + auto destPixelmap = destinationImage->GetInnerPixelmap(); + return EnhanceDetail(sourceImage->GetInnerPixelmap(), destPixelmap); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::InitializeInner() +{ + return ImageEnvironmentNative::Get().InitializeByDefault(); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::DeinitializeInner() +{ + return ImageEnvironmentNative::Get().DeinitializeByDefault(); +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::SetParameter([[maybe_unused]] const OHOS::Media::Format& parameter) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::GetParameter([[maybe_unused]] OHOS::Media::Format& parameter) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::Process( + [[maybe_unused]] const std::shared_ptr& sourceImage, + [[maybe_unused]] std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::ConvertColorSpace( + [[maybe_unused]] const std::shared_ptr& sourceImage, + [[maybe_unused]] std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::Compose( + [[maybe_unused]] const std::shared_ptr& sourceImage, + [[maybe_unused]] const std::shared_ptr& sourceGainmap, + [[maybe_unused]] std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::Decompose( + [[maybe_unused]] const std::shared_ptr& sourceImage, + [[maybe_unused]] std::shared_ptr& destinationImage, + [[maybe_unused]] std::shared_ptr& destinationGainmap) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::GenerateMetadata( + [[maybe_unused]] const std::shared_ptr& sourceImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode ImageProcessingNativeBase::EnhanceDetail( + [[maybe_unused]] const std::shared_ptr& sourceImage, + [[maybe_unused]] std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} diff --git a/framework/capi/image_processing/image_processing_utils.cpp b/framework/capi/image_processing/image_processing_utils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0fa6e2e4da4dc06d3eff5ce0ea66d9f14c18d4a7 --- /dev/null +++ b/framework/capi/image_processing/image_processing_utils.cpp @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "image_processing_utils.h" + +#include + +#include "algorithm_utils.h" +#include "surface_buffer_impl.h" +#include "surface_type.h" +#include "vpe_log.h" +#include "vpe_utils_common.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +namespace { +const std::unordered_map ERROR_MAP = { + { VPE_ALGO_ERR_OK, IMAGE_PROCESSING_SUCCESS }, + { VPE_ALGO_ERR_NO_MEMORY, IMAGE_PROCESSING_ERROR_NO_MEMORY }, + { VPE_ALGO_ERR_INVALID_OPERATION, IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED }, + { VPE_ALGO_ERR_INVALID_VAL, IMAGE_PROCESSING_ERROR_INVALID_VALUE }, + { VPE_ALGO_ERR_UNKNOWN, IMAGE_PROCESSING_ERROR_UNKNOWN }, + { VPE_ALGO_ERR_INVALID_PARAM, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER }, + { VPE_ALGO_ERR_INIT_FAILED, IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED }, + { VPE_ALGO_ERR_EXTENSION_NOT_FOUND, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING }, + { VPE_ALGO_ERR_EXTENSION_INIT_FAILED, IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED }, + { VPE_ALGO_ERR_EXTENSION_PROCESS_FAILED, IMAGE_PROCESSING_ERROR_PROCESS_FAILED }, + { VPE_ALGO_ERR_NOT_IMPLEMENTED, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING }, + { VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED }, + { VPE_ALGO_ERR_INVALID_STATE, IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED }, + { VPE_ALGO_ERR_EXTEND_START, IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED }, +}; +const std::unordered_map NDK_ERROR_STR_MAP = { + { IMAGE_PROCESSING_SUCCESS, VPE_TO_STR(IMAGE_PROCESSING_SUCCESS) }, + { IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, VPE_TO_STR(IMAGE_PROCESSING_ERROR_INVALID_PARAMETER) }, + { IMAGE_PROCESSING_ERROR_UNKNOWN, VPE_TO_STR(IMAGE_PROCESSING_ERROR_UNKNOWN) }, + { IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED, VPE_TO_STR(IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED) }, + { IMAGE_PROCESSING_ERROR_CREATE_FAILED, VPE_TO_STR(IMAGE_PROCESSING_ERROR_CREATE_FAILED) }, + { IMAGE_PROCESSING_ERROR_PROCESS_FAILED, VPE_TO_STR(IMAGE_PROCESSING_ERROR_PROCESS_FAILED) }, + { IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, VPE_TO_STR(IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING) }, + { IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, VPE_TO_STR(IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED) }, + { IMAGE_PROCESSING_ERROR_NO_MEMORY, VPE_TO_STR(IMAGE_PROCESSING_ERROR_NO_MEMORY) }, + { IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, VPE_TO_STR(IMAGE_PROCESSING_ERROR_INVALID_INSTANCE) }, + { IMAGE_PROCESSING_ERROR_INVALID_VALUE, VPE_TO_STR(IMAGE_PROCESSING_ERROR_INVALID_VALUE) } +}; +} + +ImageProcessing_ErrorCode ImageProcessingUtils::InnerErrorToNDK(VPEAlgoErrCode errorCode) +{ + auto it = ERROR_MAP.find(errorCode); + if (it == ERROR_MAP.end()) [[unlikely]] { + VPE_LOGE("Invalid error code:%{public}d", errorCode); + return IMAGE_PROCESSING_ERROR_UNKNOWN; + } + return it->second; +} + +std::string ImageProcessingUtils::ToString(ImageProcessing_ErrorCode errorCode) +{ + auto it = NDK_ERROR_STR_MAP.find(errorCode); + if (it == NDK_ERROR_STR_MAP.end()) [[unlikely]] { + VPE_LOGE("Invalid error code:%{public}d", errorCode); + return "Unsupported error:" + 
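/*
 * Illustrative sketch: typical use of the two tables above when bubbling an
 * algorithm error out through the NDK surface (the error value chosen here is
 * only an example):
 *
 *     VPEAlgoErrCode inner = VPE_ALGO_ERR_INVALID_PARAM;
 *     ImageProcessing_ErrorCode ndk = ImageProcessingUtils::InnerErrorToNDK(inner);
 *     VPE_LOGE("process failed: %{public}s", ImageProcessingUtils::ToString(ndk).c_str());
 */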
std::to_string(static_cast(errorCode)); + } + return it->second; +} + +sptr ImageProcessingUtils::GetSurfaceBufferFromPixelMap( + const std::shared_ptr& pixelmap) +{ + return VpeUtils::GetSurfaceBufferFromPixelMap(pixelmap); +} + +ImageProcessing_ErrorCode ImageProcessingUtils::ConvertPixelmapToSurfaceBuffer( + const std::shared_ptr& pixelmap, sptr bufferImpl) +{ + return VpeUtils::ConvertPixelmapToSurfaceBuffer(pixelmap, bufferImpl) ? + IMAGE_PROCESSING_SUCCESS : IMAGE_PROCESSING_ERROR_PROCESS_FAILED; +} + +ImageProcessing_ErrorCode ImageProcessingUtils::ConvertSurfaceBufferToPixelmap(const sptr& buffer, + std::shared_ptr& pixelmap) +{ + return VpeUtils::ConvertSurfaceBufferToPixelmap(buffer, pixelmap) ? + IMAGE_PROCESSING_SUCCESS : IMAGE_PROCESSING_ERROR_PROCESS_FAILED; +} + +ImageProcessing_ErrorCode ImageProcessingUtils::SetSurfaceBufferToPixelMap(const sptr& buffer, + std::shared_ptr& pixelmap) +{ + return VpeUtils::SetSurfaceBufferToPixelMap(buffer, pixelmap) ? + IMAGE_PROCESSING_SUCCESS : IMAGE_PROCESSING_ERROR_PROCESS_FAILED; +} diff --git a/framework/capi/image_processing/include/image_environment_native.h b/framework/capi/image_processing/include/image_environment_native.h new file mode 100644 index 0000000000000000000000000000000000000000..38e49b4775e8eb0ec29ad09bf2893a185c0e3446 --- /dev/null +++ b/framework/capi/image_processing/include/image_environment_native.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef IMAGE_ENVIRONMENT_NATIVE_H +#define IMAGE_ENVIRONMENT_NATIVE_H + +#include +#include + +#include "image_processing_types.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class ImageEnvironmentNative { +public: + static ImageEnvironmentNative& Get(); + + ImageProcessing_ErrorCode Initialize(); + ImageProcessing_ErrorCode Deinitialize(); + ImageProcessing_ErrorCode InitializeByDefault(); + ImageProcessing_ErrorCode DeinitializeByDefault(); + +private: + ImageEnvironmentNative() = default; + virtual ~ImageEnvironmentNative() = default; + ImageEnvironmentNative(const ImageEnvironmentNative&) = delete; + ImageEnvironmentNative& operator=(const ImageEnvironmentNative&) = delete; + ImageEnvironmentNative(ImageEnvironmentNative&&) = delete; + ImageEnvironmentNative& operator=(ImageEnvironmentNative&&) = delete; + + ImageProcessing_ErrorCode InitializeLocked(); + ImageProcessing_ErrorCode DeinitializeLocked(); + ImageProcessing_ErrorCode InitializeEnvLocked(); + ImageProcessing_ErrorCode DeinitializeEnvLocked(); + + std::mutex lock_{}; + // Guarded by lock_ begin + bool isExplicitInit_{}; + uint32_t referenceCount_{}; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // IMAGE_ENVIRONMENT_NATIVE_H diff --git a/framework/capi/image_processing/include/image_processing_capi_impl.h b/framework/capi/image_processing/include/image_processing_capi_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..91c8d7f5062e0a24f7240b1cd04a26eb5fb2f2b2 --- /dev/null +++ b/framework/capi/image_processing/include/image_processing_capi_impl.h @@ -0,0 +1,164 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef IMAGE_PROCESSING_CAPI_IMPL_H +#define IMAGE_PROCESSING_CAPI_IMPL_H + +#include "image_processing_capi_interface.h" +#include "frame_info.h" +#include +#include +#include +#include + +#include "image_processing_native_template.h" +#include "image_processing_types.h" +#include "pixelmap_native_impl.h" + +#include "detail_enhancer_image.h" +#include "colorspace_converter.h" +#include "colorspace_converter_image_native.h" + +#include + +#include "detail_enhancer_common.h" +#include "detail_enhancer_image_fwk.h" +#include "image_processing_utils.h" +#include "surface_buffer.h" +#include "surface_buffer_impl.h" +#include "surface_type.h" +#include "vpe_log.h" + +const std::map IMAGE_FORMAT_MAP = { + { OHOS::Media::PixelFormat::RGBA_8888, OHOS::GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { OHOS::Media::PixelFormat::BGRA_8888, OHOS::GraphicPixelFormat::GRAPHIC_PIXEL_FMT_BGRA_8888 }, + { OHOS::Media::PixelFormat::RGBA_1010102, OHOS::GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { OHOS::Media::PixelFormat::YCBCR_P010, OHOS::GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { OHOS::Media::PixelFormat::YCRCB_P010, OHOS::GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_P010 }, +}; +typedef enum { + NONE = 0, + BASE = 1, + GAINMAP = 2, + ALTERNATE = 3, +} ImagePixelmapHdrMetadataType; +const std::map HDR_METADATA_TYPE_MAP = { + { NONE, OHOS::HDI::Display::Graphic::Common::V1_0::CM_METADATA_NONE }, + { BASE, OHOS::HDI::Display::Graphic::Common::V1_0::CM_IMAGE_HDR_VIVID_DUAL }, + { GAINMAP, OHOS::HDI::Display::Graphic::Common::V1_0::CM_IMAGE_HDR_VIVID_DUAL }, + { ALTERNATE, OHOS::HDI::Display::Graphic::Common::V1_0::CM_IMAGE_HDR_VIVID_SINGLE }, +}; +typedef enum { + UNKNOWN = 0, + ADOBE_RGB_1998 = 1, + DCI_P3 = 2, + DISPLAY_P3 = 3, + SRGB = 4, + BT709 = 5, + BT601_EBU = 6, + BT2020_HLG = 9, + BT2020_PQ = 10, + P3_HLG = 11, + DISPLAY_P3_LIMIT = 14, + SRGB_LIMIT = 15, + BT2020_HLG_LIMIT = 19, + BT2020_PQ_LIMIT = 20, +} ImagePixelmapColorspace; +const std::map COLORSPACE_MAP = { + { SRGB, OHOS::HDI::Display::Graphic::Common::V1_0::CM_SRGB_FULL }, + { SRGB_LIMIT, OHOS::HDI::Display::Graphic::Common::V1_0::CM_SRGB_LIMIT }, + { DISPLAY_P3, OHOS::HDI::Display::Graphic::Common::V1_0::CM_P3_FULL }, + { DISPLAY_P3_LIMIT, OHOS::HDI::Display::Graphic::Common::V1_0::CM_P3_LIMIT }, + { BT2020_HLG, OHOS::HDI::Display::Graphic::Common::V1_0::CM_BT2020_HLG_FULL }, + { BT2020_HLG_LIMIT, OHOS::HDI::Display::Graphic::Common::V1_0::CM_BT2020_HLG_LIMIT }, + { BT2020_PQ, OHOS::HDI::Display::Graphic::Common::V1_0::CM_BT2020_PQ_FULL }, + { BT2020_PQ_LIMIT, OHOS::HDI::Display::Graphic::Common::V1_0::CM_BT2020_PQ_LIMIT }, + { P3_HLG, OHOS::HDI::Display::Graphic::Common::V1_0::CM_P3_HLG_LIMIT }, + { ADOBE_RGB_1998, OHOS::HDI::Display::Graphic::Common::V1_0::CM_ADOBERGB_FULL }, +}; +class ImageProcessingCapiImpl : public IImageProcessingNdk { +public: + ImageProcessingCapiImpl() = default; + virtual ~ImageProcessingCapiImpl() = default; + ImageProcessingCapiImpl(const ImageProcessingCapiImpl&) = delete; + ImageProcessingCapiImpl& operator=(const ImageProcessingCapiImpl&) = delete; + ImageProcessingCapiImpl(ImageProcessingCapiImpl&&) = delete; + ImageProcessingCapiImpl& operator=(ImageProcessingCapiImpl&&) = delete; + + ImageProcessing_ErrorCode InitializeEnvironment() final; + ImageProcessing_ErrorCode DeinitializeEnvironment() final; + bool IsColorSpaceConversionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo) final; + 
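+    // Composition combines a base image with its gainmap into one destination image; decomposition splits an image back into a base image and a gainmap.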
bool IsCompositionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* sourceGainmapInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo) final; + bool IsDecompositionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationGainmapInfo) final; + bool IsMetadataGenerationSupported(const ImageProcessing_ColorSpaceInfo* sourceImageInfo) final; + ImageProcessing_ErrorCode Create(OH_ImageProcessing** imageProcessor, int32_t type) final; + ImageProcessing_ErrorCode Destroy(OH_ImageProcessing* imageProcessor) final; + ImageProcessing_ErrorCode SetParameter(OH_ImageProcessing* imageProcessor, + const OH_AVFormat* parameter) final; + ImageProcessing_ErrorCode GetParameter(OH_ImageProcessing* imageProcessor, OH_AVFormat* parameter) final; + ImageProcessing_ErrorCode ConvertColorSpace(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage) final; + ImageProcessing_ErrorCode Compose(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage, OH_PixelmapNative* sourceGainmap, OH_PixelmapNative* destinationImage) final; + ImageProcessing_ErrorCode Decompose(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage, + OH_PixelmapNative* destinationGainmap) final; + ImageProcessing_ErrorCode GenerateMetadata(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage) final; + ImageProcessing_ErrorCode EnhanceDetail(OH_ImageProcessing* imageProcessor, OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage) final; +private: + ImageProcessing_ErrorCode LoadAlgo(); + void LoadLibrary(); + void UnloadLibrary(); + bool CheckColorSpaceConversionSupport( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo); + bool CheckCompositionSupport(const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* sourceGainmapInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo); + bool CheckDecompositionSupport(const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationGainmapInfo); + bool CheckMetadataGenerationSupport(const ImageProcessing_ColorSpaceInfo* sourceImageInfo); + void* mLibHandle{}; + using LibFunction = bool (*)(const OHOS::Media::VideoProcessingEngine::ColorSpaceInfo inputInfo, + const OHOS::Media::VideoProcessingEngine::ColorSpaceInfo outputInfo); + using LibMetaFunction = bool (*)(const OHOS::Media::VideoProcessingEngine::ColorSpaceInfo inputInfo); + LibFunction isColorSpaceConversionSupported_{nullptr}; + LibFunction isCompositionSupported_{nullptr}; + LibFunction isDecompositionSupported_{nullptr}; + LibMetaFunction isMetadataGenSupported_{nullptr}; + std::mutex lock_; + int32_t usedInstance_ { 0 }; + ClContext *openclContext_ {nullptr}; + ImageProcessing_ErrorCode OpenCLInit(); + std::shared_ptr openglContext_ {nullptr}; + ImageProcessing_ErrorCode OpenGLInit(); +}; + +extern "C" IImageProcessingNdk* CreateImageProcessingNdk(); +extern "C" void DestroyImageProcessingNdk(IImageProcessingNdk* obj); + +#endif // IMAGE_PROCESSING_CAPI_IMPL_H diff --git a/framework/capi/image_processing/include/image_processing_factory.h 
b/framework/capi/image_processing/include/image_processing_factory.h new file mode 100644 index 0000000000000000000000000000000000000000..83f8a64ae4fa27021187a711dd943ccf7e027430 --- /dev/null +++ b/framework/capi/image_processing/include/image_processing_factory.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMAGE_PROCESSING_FACTORY_H +#define IMAGE_PROCESSING_FACTORY_H + +#include + +#include "image_processing_interface.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Factory class for creating image processing object. + */ +class ImageProcessingFactory { +public: + static bool IsValid(int type); + static std::shared_ptr CreateImageProcessing(int type); +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // IMAGE_PROCESSING_FACTORY_H diff --git a/framework/capi/image_processing/include/image_processing_impl.h b/framework/capi/image_processing/include/image_processing_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..4f76034e3f065f89a48e5a55878dfe3c70b1c1f3 --- /dev/null +++ b/framework/capi/image_processing/include/image_processing_impl.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMAGE_PROCESSING_IMPL_H +#define IMAGE_PROCESSING_IMPL_H + +#include + +#include "image_processing_interface.h" + +struct OH_ImageProcessing { +public: + static ImageProcessing_ErrorCode Create(OH_ImageProcessing** instance, int type, + std::shared_ptr openglContext, + ClContext *openclContext); + static ImageProcessing_ErrorCode Destroy(OH_ImageProcessing* instance); + + std::shared_ptr GetImageProcessing(); + +private: + OH_ImageProcessing(int type); + ~OH_ImageProcessing(); + + std::shared_ptr imageProcessing_{}; +}; + +#endif // IMAGE_PROCESSING_IMPL_H \ No newline at end of file diff --git a/framework/capi/image_processing/include/image_processing_interface.h b/framework/capi/image_processing/include/image_processing_interface.h new file mode 100644 index 0000000000000000000000000000000000000000..345d3346a76861ba9bef05cbfe25e96b526e7382 --- /dev/null +++ b/framework/capi/image_processing/include/image_processing_interface.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMAGE_PROCESSING_INTERFACE_H +#define IMAGE_PROCESSING_INTERFACE_H + +#include "image_processing_types.h" +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class IImageProcessingNative { +public: + virtual ImageProcessing_ErrorCode Initialize() = 0; + virtual ImageProcessing_ErrorCode Deinitialize() = 0; + virtual ImageProcessing_ErrorCode SetParameter(const OH_AVFormat* parameter) = 0; + virtual ImageProcessing_ErrorCode GetParameter(OH_AVFormat* parameter) = 0; + virtual ImageProcessing_ErrorCode Process(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage) = 0; + virtual ImageProcessing_ErrorCode ConvertColorSpace(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage) = 0; + virtual ImageProcessing_ErrorCode Compose(OH_PixelmapNative* sourceImage, OH_PixelmapNative* sourceGainmap, + OH_PixelmapNative* destinationImage) = 0; + virtual ImageProcessing_ErrorCode Decompose(OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage, + OH_PixelmapNative* destinationGainmap) = 0; + virtual ImageProcessing_ErrorCode GenerateMetadata(OH_PixelmapNative* sourceImage) = 0; + virtual ImageProcessing_ErrorCode EnhanceDetail(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage) = 0; + +protected: + IImageProcessingNative() = default; + virtual ~IImageProcessingNative() = default; + IImageProcessingNative(const IImageProcessingNative&) = delete; + IImageProcessingNative& operator=(const IImageProcessingNative&) = delete; + IImageProcessingNative(IImageProcessingNative&&) = delete; + IImageProcessingNative& operator=(IImageProcessingNative&&) = delete; + +public: + ClContext *opengclContext_ {nullptr}; + std::shared_ptr openglContext_ {nullptr}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // IMAGE_PROCESSING_INTERFACE_H \ No newline at end of file diff --git a/framework/capi/image_processing/include/image_processing_native_base.h b/framework/capi/image_processing/include/image_processing_native_base.h new file mode 100644 index 0000000000000000000000000000000000000000..3c1837374a8314f139f80ac847ed4249d2670710 --- /dev/null +++ b/framework/capi/image_processing/include/image_processing_native_base.h @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef IMAGE_PROCESSING_NATIVE_BASE_H +#define IMAGE_PROCESSING_NATIVE_BASE_H + +#include "common/native_mfmagic.h" +#include "pixelmap_native_impl.h" +#include "pixel_map.h" + +#include "image_processing_interface.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Base implementaion for image processing. + */ +class ImageProcessingNativeBase : public IImageProcessingNative { +public: + ImageProcessing_ErrorCode Initialize() final; + ImageProcessing_ErrorCode Deinitialize() final; + ImageProcessing_ErrorCode SetParameter(const OH_AVFormat* parameter) final; + ImageProcessing_ErrorCode GetParameter(OH_AVFormat* parameter) final; + ImageProcessing_ErrorCode Process(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage) final; + ImageProcessing_ErrorCode ConvertColorSpace(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage) final; + ImageProcessing_ErrorCode Compose(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* sourceGainmap, OH_PixelmapNative* destinationImage) final; + ImageProcessing_ErrorCode Decompose(OH_PixelmapNative* sourceImage, + OH_PixelmapNative* destinationImage, OH_PixelmapNative* destinationGainmap) final; + ImageProcessing_ErrorCode GenerateMetadata(OH_PixelmapNative* sourceImage) final; + ImageProcessing_ErrorCode EnhanceDetail(OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage) final; +protected: + explicit ImageProcessingNativeBase(); + virtual ~ImageProcessingNativeBase() = default; + ImageProcessingNativeBase(const ImageProcessingNativeBase&) = delete; + ImageProcessingNativeBase& operator=(const ImageProcessingNativeBase&) = delete; + ImageProcessingNativeBase(ImageProcessingNativeBase&&) = delete; + ImageProcessingNativeBase& operator=(ImageProcessingNativeBase&&) = delete; + + virtual ImageProcessing_ErrorCode InitializeInner(); + virtual ImageProcessing_ErrorCode DeinitializeInner(); + virtual ImageProcessing_ErrorCode SetParameter(const OHOS::Media::Format& parameter); + virtual ImageProcessing_ErrorCode GetParameter(OHOS::Media::Format& parameter); + virtual ImageProcessing_ErrorCode Process(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage); + virtual ImageProcessing_ErrorCode ConvertColorSpace(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage); + virtual ImageProcessing_ErrorCode Compose(const std::shared_ptr& sourceImage, + const std::shared_ptr& sourceGainmap, + std::shared_ptr& destinationImage); + virtual ImageProcessing_ErrorCode Decompose(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage, + std::shared_ptr& destinationGainmap); + virtual ImageProcessing_ErrorCode GenerateMetadata(const std::shared_ptr& sourceImage); + virtual ImageProcessing_ErrorCode EnhanceDetail(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage); + +private: + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // IMAGE_PROCESSING_NATIVE_BASE_H \ No newline at end of file diff --git a/framework/capi/image_processing/include/image_processing_native_template.h b/framework/capi/image_processing/include/image_processing_native_template.h new file mode 100644 index 0000000000000000000000000000000000000000..f5781533835697d15a83970da97d43886b0f7d09 --- /dev/null +++ b/framework/capi/image_processing/include/image_processing_native_template.h @@ -0,0 +1,58 @@ +/* + * Copyright 
(c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMAGE_PROCESSING_NATIVE_TEMPLATE_H +#define IMAGE_PROCESSING_NATIVE_TEMPLATE_H + +#include + +#include "nocopyable.h" +#include "image_processing_native_base.h" + +#define DEFINE_WITH_DISALLOW_COPY_AND_MOVE(className) \ + className([[maybe_unused]] Protected mask) \ + : ImageProcessingNativeTemplate() {} \ + virtual ~className() = default; \ + DISALLOW_COPY_AND_MOVE(className) + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Base implementaion for image processing. + */ +template +class ImageProcessingNativeTemplate : public ImageProcessingNativeBase, public std::enable_shared_from_this { +public: + static inline std::shared_ptr Create() + { + return std::make_shared(Protected()); + } + +protected: + struct Protected { explicit Protected() = default; }; + + explicit ImageProcessingNativeTemplate() : ImageProcessingNativeBase() {} + virtual ~ImageProcessingNativeTemplate() = default; + ImageProcessingNativeTemplate(const ImageProcessingNativeTemplate&) = delete; + ImageProcessingNativeTemplate& operator=(const ImageProcessingNativeTemplate&) = delete; + ImageProcessingNativeTemplate(ImageProcessingNativeTemplate&&) = delete; + ImageProcessingNativeTemplate& operator=(ImageProcessingNativeTemplate&&) = delete; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // IMAGE_PROCESSING_NATIVE_TEMPLATE_H \ No newline at end of file diff --git a/framework/capi/image_processing/include/image_processing_utils.h b/framework/capi/image_processing/include/image_processing_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..a981287c9bb9bde61a72ee84ec8a7dc663aeb2ee --- /dev/null +++ b/framework/capi/image_processing/include/image_processing_utils.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef IMAGE_PROCESSING_UTILS_H
+#define IMAGE_PROCESSING_UTILS_H
+
+#include "pixelmap_native_impl.h"
+#include "surface_buffer.h"
+
+#include "algorithm_errors.h"
+#include "image_processing_types.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+class ImageProcessingUtils {
+public:
+    static ImageProcessing_ErrorCode InnerErrorToNDK(VPEAlgoErrCode errorCode);
+    static std::string ToString(ImageProcessing_ErrorCode errorCode);
+    static sptr<SurfaceBuffer> GetSurfaceBufferFromPixelMap(const std::shared_ptr<PixelMap>& pixelmap);
+    static ImageProcessing_ErrorCode ConvertPixelmapToSurfaceBuffer(
+        const std::shared_ptr<PixelMap>& pixelmap, sptr<SurfaceBuffer> bufferImpl);
+    static ImageProcessing_ErrorCode ConvertSurfaceBufferToPixelmap(const sptr<SurfaceBuffer>& buffer,
+        std::shared_ptr<PixelMap>& pixelmap);
+    static ImageProcessing_ErrorCode SetSurfaceBufferToPixelMap(const sptr<SurfaceBuffer>& buffer,
+        std::shared_ptr<PixelMap>& pixelmap);
+};
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
+
+#endif // IMAGE_PROCESSING_UTILS_H
\ No newline at end of file
diff --git a/framework/capi/image_processing/metadata_generator/include/metadata_generator_image_native.h b/framework/capi/image_processing/metadata_generator/include/metadata_generator_image_native.h
new file mode 100644
index 0000000000000000000000000000000000000000..8a7bc4c093ad075b7fbce03af575cf427118cc07
--- /dev/null
+++ b/framework/capi/image_processing/metadata_generator/include/metadata_generator_image_native.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef METADATA_GENERATOR_IMAGE_NATIVE_H +#define METADATA_GENERATOR_IMAGE_NATIVE_H + +#include +#include +#include +#include + +#include "detail_enhancer_common.h" +#include "detail_enhancer_image_fwk.h" +#include "image_processing_utils.h" +#include "surface_buffer.h" +#include "surface_buffer_impl.h" +#include "surface_type.h" +#include "vpe_log.h" +#include "image_processing_native_template.h" +#include "image_processing_types.h" +#include "pixelmap_native_impl.h" +#include "detail_enhancer_image.h" +#include "metadata_generator.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class MetadataGeneratorImageNative : public ImageProcessingNativeTemplate { +public: + DEFINE_WITH_DISALLOW_COPY_AND_MOVE(MetadataGeneratorImageNative); + + ImageProcessing_ErrorCode InitializeInner() override; + ImageProcessing_ErrorCode DeinitializeInner() override; + ImageProcessing_ErrorCode SetParameter(const OHOS::Media::Format& parameter) override; + ImageProcessing_ErrorCode GetParameter(OHOS::Media::Format& parameter) override; + ImageProcessing_ErrorCode Process(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) override; + ImageProcessing_ErrorCode ConvertColorSpace(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) override; + ImageProcessing_ErrorCode Compose(const std::shared_ptr& sourceImage, + const std::shared_ptr& sourceGainmap, + std::shared_ptr& destinationImage) override; + ImageProcessing_ErrorCode Decompose(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage, + std::shared_ptr& destinationGainmap) override; + ImageProcessing_ErrorCode GenerateMetadata(const std::shared_ptr& sourceImage) override; + ImageProcessing_ErrorCode EnhanceDetail(const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) override; +private: + ImageProcessing_ErrorCode CheckParameter(); + ImageProcessing_ErrorCode ConvertPixelmapToSurfaceBuffer(const std::shared_ptr& pixelmap, + sptr& bufferImpl); + uint32_t GetColorSpaceType(const CM_ColorSpaceInfo &colorSpaceInfo); + CM_ColorSpaceInfo GetColorSpaceInfo(const uint32_t colorSpaceType); + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::shared_ptr metadataGenerator_{}; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // METADATA_GENERATOR_IMAGE_NATIVE_H \ No newline at end of file diff --git a/framework/capi/image_processing/metadata_generator/metadata_generator_image_native.cpp b/framework/capi/image_processing/metadata_generator/metadata_generator_image_native.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ad6ab7aaf2031782b8742501cfc7d4a464969c41 --- /dev/null +++ b/framework/capi/image_processing/metadata_generator/metadata_generator_image_native.cpp @@ -0,0 +1,193 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "metadata_generator_image_native.h" + +#include + +#include "image_processing_capi_impl.h" +#include "detail_enhancer_common.h" +#include "detail_enhancer_image_fwk.h" +#include "image_processing_utils.h" +#include "surface_buffer.h" +#include "surface_buffer_impl.h" +#include "surface_type.h" +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +ImageProcessing_ErrorCode MetadataGeneratorImageNative::InitializeInner() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + metadataGenerator_ = MetadataGenerator::Create(openglContext_); + CHECK_AND_RETURN_RET_LOG(metadataGenerator_ != nullptr, IMAGE_PROCESSING_ERROR_CREATE_FAILED, + "Create detail colorspaceConverter failed!"); + isInitialized_ = true; + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode MetadataGeneratorImageNative::DeinitializeInner() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + metadataGenerator_ = nullptr; + isInitialized_ = false; + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode MetadataGeneratorImageNative::SetParameter(const OHOS::Media::Format& parameter) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode MetadataGeneratorImageNative::GetParameter(OHOS::Media::Format& parameter) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode MetadataGeneratorImageNative::Process( + const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} +ImageProcessing_ErrorCode MetadataGeneratorImageNative::ConvertColorSpace( + const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} +uint32_t MetadataGeneratorImageNative::GetColorSpaceType(const CM_ColorSpaceInfo &colorSpaceInfo) +{ + return ((static_cast(colorSpaceInfo.primaries) << COLORPRIMARIES_OFFSET) + + (static_cast(colorSpaceInfo.transfunc) << TRANSFUNC_OFFSET) + + (static_cast(colorSpaceInfo.matrix) << MATRIX_OFFSET) + + (static_cast(colorSpaceInfo.range) << RANGE_OFFSET)); +} +CM_ColorSpaceInfo MetadataGeneratorImageNative::GetColorSpaceInfo(const uint32_t colorSpaceType) +{ + CM_ColorSpaceInfo info; + info.primaries = static_cast((colorSpaceType & COLORPRIMARIES_MASK) >> COLORPRIMARIES_OFFSET); + info.transfunc = static_cast((colorSpaceType & TRANSFUNC_MASK) >> TRANSFUNC_OFFSET); + info.matrix = static_cast((colorSpaceType & MATRIX_MASK) >> MATRIX_OFFSET); + info.range = static_cast((colorSpaceType & RANGE_MASK) >> RANGE_OFFSET); + return info; +} + +ImageProcessing_ErrorCode MetadataGeneratorImageNative::Compose( + const std::shared_ptr& sourceImage, + const std::shared_ptr& sourceGainmap, + std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode MetadataGeneratorImageNative::Decompose( + const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage, + std::shared_ptr& destinationGainmap) +{ + return IMAGE_PROCESSING_SUCCESS; +} + +ImageProcessing_ErrorCode MetadataGeneratorImageNative::GenerateMetadata( + const std::shared_ptr& sourceImage) +{ + MetadataGeneratorParameter parameterMT; + parameterMT.algoType = MetadataGeneratorAlgoType::META_GEN_ALGO_TYPE_IMAGE; + auto ret = 
ImageProcessingUtils::InnerErrorToNDK(metadataGenerator_->SetParameter(parameterMT)); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "cSetParameter failed!"); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED, + "Detail enhancer image is not initialized!"); + CHECK_AND_RETURN_RET_LOG(sourceImage != nullptr, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER, "sourceImage is null!"); + sptr sourceImageSurfaceBuffer = nullptr; + ret = ConvertPixelmapToSurfaceBuffer(sourceImage, sourceImageSurfaceBuffer); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "convert to surface buffer failed!"); + ret = ImageProcessingUtils::InnerErrorToNDK( + metadataGenerator_->Process(sourceImageSurfaceBuffer)); + CHECK_AND_RETURN_RET_LOG(ret == IMAGE_PROCESSING_SUCCESS, ret, "process failed!"); + return ret; +} + +ImageProcessing_ErrorCode MetadataGeneratorImageNative::EnhanceDetail( + const std::shared_ptr& sourceImage, + std::shared_ptr& destinationImage) +{ + return IMAGE_PROCESSING_SUCCESS; +} +// LCOV_EXCL_START +static CM_ColorSpaceType ConvertColorSpaceType(ColorManager::ColorSpaceName colorSpace, bool base) +{ + switch (colorSpace) { + case ColorManager::ColorSpaceName::SRGB : + return CM_SRGB_FULL; + case ColorManager::ColorSpaceName::SRGB_LIMIT : + return CM_SRGB_LIMIT; + case ColorManager::ColorSpaceName::DISPLAY_P3 : + return CM_P3_FULL; + case ColorManager::ColorSpaceName::DISPLAY_P3_LIMIT : + return CM_P3_LIMIT; + case ColorManager::ColorSpaceName::BT2020 : + case ColorManager::ColorSpaceName::BT2020_HLG : + return CM_BT2020_HLG_FULL; + case ColorManager::ColorSpaceName::BT2020_HLG_LIMIT : + return CM_BT2020_HLG_LIMIT; + case ColorManager::ColorSpaceName::BT2020_PQ : + return CM_BT2020_PQ_FULL; + case ColorManager::ColorSpaceName::BT2020_PQ_LIMIT : + return CM_BT2020_PQ_LIMIT; + default: + return base ? CM_SRGB_FULL : CM_BT2020_HLG_FULL; + } + return base ? 
CM_SRGB_FULL : CM_BT2020_HLG_FULL;
+}
+
+ImageProcessing_ErrorCode MetadataGeneratorImageNative::ConvertPixelmapToSurfaceBuffer(
+    const std::shared_ptr<PixelMap>& pixelmap, sptr<SurfaceBuffer>& bufferImpl)
+{
+    auto it = IMAGE_FORMAT_MAP.find(pixelmap->GetPixelFormat());
+    CHECK_AND_RETURN_RET_LOG(it != IMAGE_FORMAT_MAP.end(), IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING,
+        "unsupported format: %{public}d", pixelmap->GetPixelFormat());
+    // Only DMA-allocated pixel maps expose an underlying SurfaceBuffer that can be reused directly.
+    CHECK_AND_RETURN_RET_LOG(pixelmap->GetAllocatorType() == AllocatorType::DMA_ALLOC,
+        IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, "GetAllocatorType: %{public}d", pixelmap->GetAllocatorType());
+    bufferImpl = reinterpret_cast<SurfaceBuffer*>(pixelmap->GetFd());
+    CHECK_AND_RETURN_RET_LOG(bufferImpl != nullptr, IMAGE_PROCESSING_ERROR_PROCESS_FAILED,
+        "bufferImpl is nullptr");
+    auto colorspace = ConvertColorSpaceType(pixelmap->InnerGetGrColorSpace().GetColorSpaceName(), true);
+    auto colorspaceinfo = GetColorSpaceInfo(colorspace);
+    VPE_LOGD("colorspace : %{public}d", colorspace);
+    std::vector<uint8_t> colorSpaceInfoVec;
+    colorSpaceInfoVec.resize(sizeof(CM_ColorSpaceInfo));
+    auto ret = memcpy_s(colorSpaceInfoVec.data(), colorSpaceInfoVec.size(), &colorspaceinfo,
+        sizeof(CM_ColorSpaceInfo));
+    CHECK_AND_RETURN_RET_LOG(ret == EOK, IMAGE_PROCESSING_ERROR_PROCESS_FAILED,
+        "memcpy_s, err: %{public}d", ret);
+    auto err = bufferImpl->SetMetadata(ATTRKEY_COLORSPACE_INFO, colorSpaceInfoVec);
+    CHECK_AND_RETURN_RET_LOG(GSERROR_OK == err, IMAGE_PROCESSING_ERROR_PROCESS_FAILED,
+        "Set color space info failed, err: %{public}d", err);
+    return IMAGE_PROCESSING_SUCCESS;
+}
+
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
\ No newline at end of file
diff --git a/framework/capi/video_processing/aihdr_enhancer/aihdr_enhancer_video_native.cpp b/framework/capi/video_processing/aihdr_enhancer/aihdr_enhancer_video_native.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..a2c6ea012a2fb8b2e3cd7fca6efb34238d60fcbd
--- /dev/null
+++ b/framework/capi/video_processing/aihdr_enhancer/aihdr_enhancer_video_native.cpp
@@ -0,0 +1,202 @@
+/*
+ * Copyright (c) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#include "aihdr_enhancer_video_native.h" + +#include + +#include "native_window.h" + +#include "video_processing_utils.h" +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +int32_t AihdrEnhancerVideoNative::InitializeInner() +{ + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + aihdrEnhancer_ = AihdrEnhancerVideo::Create(); + CHECK_AND_RETURN_RET_LOG(aihdrEnhancer_ != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, + "Create colorSpace converter failed!"); + isInitialized_ = true; + return 0; +} + +int32_t AihdrEnhancerVideoNative::DeinitializeInner() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + aihdrEnhancer_ = nullptr; + isInitialized_ = false; + return 0; +} + +int32_t AihdrEnhancerVideoNative::RegisterCallback() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + auto callback = std::make_shared(shared_from_this()); + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, + "Create callback failed!"); + CHECK_AND_RETURN_RET_LOG(aihdrEnhancer_->SetCallback(callback) == 0, + VIDEO_PROCESSING_ERROR_PROCESS_FAILED, "RegisterCallback failed!"); + return 0; +} + +int32_t AihdrEnhancerVideoNative::SetSurface(const OHNativeWindow* window) +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + CHECK_AND_RETURN_RET_LOG(window != nullptr && window->surface != nullptr, + VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "window is null or surface buffer is null!"); + return aihdrEnhancer_->SetSurface(window); +} + +int32_t AihdrEnhancerVideoNative::GetSurface(OHNativeWindow** window) +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + CHECK_AND_RETURN_RET_LOG(window != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "window is null!"); + return aihdrEnhancer_->GetSurface(window); +} + +int32_t AihdrEnhancerVideoNative::SetParameter(const int& parameter) +{ + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; +} + +int32_t AihdrEnhancerVideoNative::GetParameter(int& parameter) +{ + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; +} + +int32_t AihdrEnhancerVideoNative::OnStart() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + int32_t ret = 0; + ret = aihdrEnhancer_->Configure(); + CHECK_AND_RETURN_RET_LOG(ret == 0, VideoProcessingUtils::InnerErrorToNDK(static_cast(ret)), + "Configure fail!"); + ret = aihdrEnhancer_->Prepare(); + CHECK_AND_RETURN_RET_LOG(ret == 0, VideoProcessingUtils::InnerErrorToNDK(static_cast(ret)), + "Prepare fail!"); + ret = aihdrEnhancer_->Start(); + CHECK_AND_RETURN_RET_LOG(ret == 0, VideoProcessingUtils::InnerErrorToNDK(static_cast(ret)), + "Start fail!"); + return 0; +} + +int32_t AihdrEnhancerVideoNative::OnStop() +{ + 
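+    // As with the other lifecycle calls, stopping requires a prior successful initialization and is serialized by lock_.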
std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + return static_cast(aihdrEnhancer_->Stop()); +} + +int32_t AihdrEnhancerVideoNative::OnRenderOutputBuffer(uint32_t index) +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + return static_cast(aihdrEnhancer_->ReleaseOutputBuffer(index, true)); +} + +void AihdrEnhancerVideoNative::OnError(int32_t errorCode) +{ + CHECK_AND_RETURN_LOG(onError_ != nullptr, "onError_ is null!"); + onError_(errorCode); +} + +void AihdrEnhancerVideoNative::OnState(int32_t state) +{ + CHECK_AND_RETURN_LOG(onState_ != nullptr, "onState_ is null!"); + onState_(state); +} + +void AihdrEnhancerVideoNative::OnNewOutputBuffer(uint32_t index) +{ + OnRenderOutputBuffer(index); + CHECK_AND_RETURN_LOG(onNewOutputBuffer_ != nullptr, "onNewOutputBuffer_ is null!"); + onNewOutputBuffer_(index); +} + +void AihdrEnhancerVideoNative::BindOnError(VideoProcessingCallback_OnError onError) +{ + onError_ = onError; +} + +void AihdrEnhancerVideoNative::BindOnState(VideoProcessingCallback_OnState onState) +{ + onState_ = onState; +} + +void AihdrEnhancerVideoNative::BindOnNewOutputBuffer(VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer) +{ + onNewOutputBuffer_ = onNewOutputBuffer; +} + +AihdrEnhancerVideoNative::NativeCallback::NativeCallback( + const std::shared_ptr& owner) + : owner_(owner) +{ +} + +void AihdrEnhancerVideoNative::NativeCallback::OnError(int32_t errorCode) +{ + SendCallback([this, &errorCode]() { + owner_->OnError(static_cast(errorCode)); + }); +} + +void AihdrEnhancerVideoNative::NativeCallback::OnState(int32_t state) +{ + SendCallback([this, &state]() { + owner_->OnState(static_cast(state)); + }); +} + + +void AihdrEnhancerVideoNative::NativeCallback::OnOutputBufferAvailable(uint32_t index, + [[maybe_unused]] AihdrEnhancerBufferFlag flag) +{ + SendCallback([this, &index]() { + owner_->OnNewOutputBuffer(index); + }); +} + +void AihdrEnhancerVideoNative::NativeCallback::SendCallback(std::function&& callback) const +{ + CHECK_AND_RETURN_LOG(owner_ != nullptr, "owner_ is null!"); + callback(); +} diff --git a/framework/capi/video_processing/aihdr_enhancer/include/aihdr_enhancer_video_native.h b/framework/capi/video_processing/aihdr_enhancer/include/aihdr_enhancer_video_native.h new file mode 100644 index 0000000000000000000000000000000000000000..23969eb993300daad89274269fc37ca83e02c1fe --- /dev/null +++ b/framework/capi/video_processing/aihdr_enhancer/include/aihdr_enhancer_video_native.h @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef AIHDR_ENHANCER_VIDEO_NATIVE_H +#define AIHDR_ENHANCER_VIDEO_NATIVE_H + +#include +#include +#include + +#include "external_window.h" + +#include "aihdr_enhancer_video.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +typedef void (*VideoProcessingCallback_OnError)(int32_t error); +typedef void (*VideoProcessingCallback_OnState)(int32_t state); +typedef void (*VideoProcessingCallback_OnNewOutputBuffer)(uint32_t index); + +class __attribute__((visibility("default"))) AihdrEnhancerVideoNative : + public std::enable_shared_from_this { +public: + int32_t InitializeInner(); + int32_t DeinitializeInner(); + int32_t RegisterCallback(); + int32_t SetSurface(const OHNativeWindow* window); + int32_t GetSurface(OHNativeWindow** window); + int32_t SetParameter(const int& parameter); + int32_t GetParameter(int& parameter); + int32_t OnStart(); + int32_t OnStop(); + int32_t OnRenderOutputBuffer(uint32_t index); + + void BindOnError(VideoProcessingCallback_OnError onError); + void BindOnState(VideoProcessingCallback_OnState onState); + void BindOnNewOutputBuffer(VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer); + + void OnError(int32_t errorCode); + void OnState(int32_t state); + void OnNewOutputBuffer(uint32_t index); + +private: + class NativeCallback : public AihdrEnhancerVideoCallback { + public: + explicit NativeCallback(const std::shared_ptr& owner); + virtual ~NativeCallback() = default; + NativeCallback(const NativeCallback&) = delete; + NativeCallback& operator=(const NativeCallback&) = delete; + NativeCallback(NativeCallback&&) = delete; + NativeCallback& operator=(NativeCallback&&) = delete; + + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, AihdrEnhancerBufferFlag flag) override; + + private: + void SendCallback(std::function&& callback) const; + + const std::shared_ptr owner_{}; + }; + + VideoProcessingCallback_OnError onError_{}; + VideoProcessingCallback_OnState onState_{}; + VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer_{}; + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::shared_ptr aihdrEnhancer_{}; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // AIHDR_ENHANCER_VIDEO_NATIVE_H diff --git a/framework/capi/video_processing/colorspace_converter/colorSpace_converter_video_native.cpp b/framework/capi/video_processing/colorspace_converter/colorSpace_converter_video_native.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0e5369dd146b0ba32ca8c28e1b225339772d0a86 --- /dev/null +++ b/framework/capi/video_processing/colorspace_converter/colorSpace_converter_video_native.cpp @@ -0,0 +1,237 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include "video_processing_utils.h" +#include "colorSpace_converter_video_native.h" +#include "colorspace_converter_video_description.h" +#include "video_processing_capi_capability.h" +#include "surface_type.h" +#include "algorithm_common.h" +#include "v1_0/cm_color_space.h" +#include "native_buffer.h" +#include "window.h" +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +VideoProcessing_ErrorCode ColorSpaceConverterVideoNative::InitializeInner() +{ + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + colorSpaceConverter_ = ColorSpaceConverterVideo::Create(openglContext_); + CHECK_AND_RETURN_RET_LOG(colorSpaceConverter_ != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, + "Create colorSpace converter failed!"); + isInitialized_ = true; + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode ColorSpaceConverterVideoNative::DeinitializeInner() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + colorSpaceConverter_ = nullptr; + isInitialized_ = false; + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode ColorSpaceConverterVideoNative::RegisterCallback() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + auto callback = std::make_shared(shared_from_this()); + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, + "Create callback failed!"); + CHECK_AND_RETURN_RET_LOG(colorSpaceConverter_->SetCallback(callback) == VPE_ALGO_ERR_OK, + VIDEO_PROCESSING_ERROR_PROCESS_FAILED, "RegisterCallback failed!"); + + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode ColorSpaceConverterVideoNative::SetSurface(const sptr& surface, + const OHNativeWindow& window) +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, + "window surface is null!"); + colorSpaceConverter_->SetOutputSurface(surface); + + colorSpaceValue_ = static_cast(CM_BT709_LIMIT); + formatValue_ = static_cast(GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + metadataValue_ = static_cast(CM_METADATA_NONE); + int32_t formatGet = 0; + auto ret = OH_NativeWindow_NativeWindowHandleOpt(const_cast(&window), GET_FORMAT, &formatGet); + if ((ret == 0) && (formatGet != 0)) { + OH_NativeBuffer_Format formatGetResult = static_cast(formatGet); + auto itFormat = NATIVE_FORMAT_TO_GRAPHIC_MAP.find(formatGetResult); + if (itFormat != NATIVE_FORMAT_TO_GRAPHIC_MAP.end()) { + formatValue_ = static_cast(itFormat->second); + } + } + + CHECK_AND_RETURN_RET_LOG(const_cast(&window) != nullptr, + VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "SetSurface window is null!"); + OH_NativeBuffer_ColorSpace colorSpaceOut = OH_COLORSPACE_NONE; + ret = OH_NativeWindow_GetColorSpace(const_cast(&window), &colorSpaceOut); + CHECK_AND_LOG(ret == 0, "OH_NativeWindow_GetColorSpace fail!"); + if ((ret == 0) && (colorSpaceOut != 0)) { + 
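+        // Map the color space reported by the native window to the corresponding CM color space value.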
auto itColorSpace = NATIVE_COLORSPACE_TO_CM_MAP.find(colorSpaceOut); + if (itColorSpace != NATIVE_COLORSPACE_TO_CM_MAP.end()) { + colorSpaceValue_ = static_cast(itColorSpace->second); + } + } + int32_t buffSize; + uint8_t *checkMetaData; + ret = OH_NativeWindow_GetMetadataValue(const_cast(&window), OH_HDR_METADATA_TYPE, + &buffSize, &checkMetaData); + if (ret != 0) { + VPE_LOGE("OH_NativeWindow_GetMetadataValue failed, ret:%{public}d", ret); + return VIDEO_PROCESSING_SUCCESS; + } + OH_NativeBuffer_MetadataType metadataGetResult = static_cast(checkMetaData[0]); + auto itMetadata = NATIVE_METADATATYPE_TO_CM_MAP.find(metadataGetResult); + if (itMetadata != NATIVE_METADATATYPE_TO_CM_MAP.end()) { + metadataValue_ = static_cast(itMetadata->second); + } + delete[] checkMetaData; + return VIDEO_PROCESSING_SUCCESS; +} + +sptr ColorSpaceConverterVideoNative::GetSurface() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), nullptr, "Initialization failed!"); + return colorSpaceConverter_->CreateInputSurface(); +} + +VideoProcessing_ErrorCode ColorSpaceConverterVideoNative::SetParameter(const OHOS::Media::Format& parameter) +{ + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; +} + +VideoProcessing_ErrorCode ColorSpaceConverterVideoNative::GetParameter(OHOS::Media::Format& parameter) +{ + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; +} + +VideoProcessing_ErrorCode ColorSpaceConverterVideoNative::OnStart() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + int32_t ret = 0; + OHOS::Media::Format format; + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, formatValue_); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, + ((static_cast(colorSpaceValue_) & COLORPRIMARIES_MASK) >> COLORPRIMARIES_OFFSET)); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, + ((static_cast(colorSpaceValue_) & TRANSFUNC_MASK) >> TRANSFUNC_OFFSET)); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, + ((static_cast(colorSpaceValue_) & MATRIX_MASK) >> MATRIX_OFFSET)); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, + ((static_cast(colorSpaceValue_) & RANGE_MASK) >> RANGE_OFFSET)); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, metadataValue_); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, + int(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC)); + ret = colorSpaceConverter_->Configure(format); + if (ret != VPE_ALGO_ERR_OK) { + return VideoProcessingUtils::InnerErrorToNDK(static_cast(ret)); + } + ret = colorSpaceConverter_->Prepare(); + if (ret != VPE_ALGO_ERR_OK) { + return VideoProcessingUtils::InnerErrorToNDK(static_cast(ret)); + } + ret = colorSpaceConverter_->Start(); + if (ret != VPE_ALGO_ERR_OK) { + return VideoProcessingUtils::InnerErrorToNDK(static_cast(ret)); + } + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode ColorSpaceConverterVideoNative::OnStop() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + return VideoProcessingUtils::InnerErrorToNDK( + static_cast(colorSpaceConverter_->Stop())); +} + +VideoProcessing_ErrorCode ColorSpaceConverterVideoNative::OnRenderOutputBuffer(uint32_t index) +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), 
VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + return VideoProcessingUtils::InnerErrorToNDK( + static_cast(colorSpaceConverter_->ReleaseOutputBuffer(index, true))); +} + +ColorSpaceConverterVideoNative::NativeCallback::NativeCallback( + const std::shared_ptr& owner) + : owner_(owner) +{ +} + +void ColorSpaceConverterVideoNative::NativeCallback::OnError(int32_t errorCode) +{ + SendCallback([this, &errorCode]() { + owner_->OnError(VideoProcessingUtils::InnerErrorToNDK(static_cast(errorCode))); + }); +} + +void ColorSpaceConverterVideoNative::NativeCallback::OnState(int32_t state) +{ + SendCallback([this, &state]() { + owner_->OnState(VideoProcessingUtils::InnerStateToNDK(static_cast(state))); + }); +} + +void ColorSpaceConverterVideoNative::NativeCallback::OnOutputBufferAvailable(uint32_t index, + [[maybe_unused]] CscvBufferFlag flag) +{ + SendCallback([this, &index]() { + owner_->OnNewOutputBuffer(index); + }); +} + +void ColorSpaceConverterVideoNative::NativeCallback::OnOutputFormatChanged(const Format& format) +{ + (void)format; +} + +void ColorSpaceConverterVideoNative::NativeCallback::SendCallback(std::function&& callback) const +{ + if (owner_ == nullptr) { + VPE_LOGE("owner is null!"); + return; + } + + callback(); +} diff --git a/framework/capi/video_processing/colorspace_converter/include/colorSpace_converter_video_native.h b/framework/capi/video_processing/colorspace_converter/include/colorSpace_converter_video_native.h new file mode 100644 index 0000000000000000000000000000000000000000..2e9ce1368dbcdafd22344016794c49ef67ed1445 --- /dev/null +++ b/framework/capi/video_processing/colorspace_converter/include/colorSpace_converter_video_native.h @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef COLORSPACE_CONVERTER_VIDEO_NATIVE_H +#define COLORSPACE_CONVERTER_VIDEO_NATIVE_H + +#include +#include +#include + +#include "video_processing_native_template.h" +#include "colorspace_converter_video.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * ColorSpace converter NDK interface implementaion. 
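+ * It wraps ColorSpaceConverterVideo and forwards surface, parameter, and lifecycle calls to it.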
+ */ +class ColorSpaceConverterVideoNative : public VideoProcessingNativeTemplate { +public: + DEFINE_WITH_DISALLOW_COPY_AND_MOVE(ColorSpaceConverterVideoNative); + + VideoProcessing_ErrorCode InitializeInner() override; + VideoProcessing_ErrorCode DeinitializeInner() override; + VideoProcessing_ErrorCode RegisterCallback() override; + VideoProcessing_ErrorCode SetSurface(const sptr& surface, const OHNativeWindow& window) override; + sptr GetSurface() override; + VideoProcessing_ErrorCode SetParameter(const OHOS::Media::Format& parameter) override; + VideoProcessing_ErrorCode GetParameter(OHOS::Media::Format& parameter) override; + VideoProcessing_ErrorCode OnStart() override; + VideoProcessing_ErrorCode OnStop() override; + VideoProcessing_ErrorCode OnRenderOutputBuffer(uint32_t index) override; + +private: + class NativeCallback : public ColorSpaceConverterVideoCallback { + public: + explicit NativeCallback(const std::shared_ptr& owner); + virtual ~NativeCallback() = default; + NativeCallback(const NativeCallback&) = delete; + NativeCallback& operator=(const NativeCallback&) = delete; + NativeCallback(NativeCallback&&) = delete; + NativeCallback& operator=(NativeCallback&&) = delete; + + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) override; + void OnOutputFormatChanged(const Format& format) override; + + private: + void SendCallback(std::function&& callback) const; + + const std::shared_ptr owner_{}; + }; + + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::shared_ptr colorSpaceConverter_{}; + int32_t colorSpaceValue_ = 0; + int32_t formatValue_ = 0; + int32_t metadataValue_ = 0; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // COLORSPACE_CONVERTER_VIDEO_NATIVE_H diff --git a/framework/capi/video_processing/detail_enhancer/detail_enhancer_video_native.cpp b/framework/capi/video_processing/detail_enhancer/detail_enhancer_video_native.cpp new file mode 100644 index 0000000000000000000000000000000000000000..dc5af52918696321d78274e963aa9b60e7ac54d7 --- /dev/null +++ b/framework/capi/video_processing/detail_enhancer/detail_enhancer_video_native.cpp @@ -0,0 +1,195 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "detail_enhancer_video_native.h" + +#include + +#include "vpe_log.h" +#include "video_processing_utils.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +namespace { +const std::unordered_map LEVEL_MAP = { + { VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_NONE, DETAIL_ENH_LEVEL_NONE }, + { VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_LOW, DETAIL_ENH_LEVEL_LOW }, + { VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_MEDIUM, DETAIL_ENH_LEVEL_MEDIUM }, + { VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH, DETAIL_ENH_LEVEL_HIGH }, +}; +} + +VideoProcessing_ErrorCode DetailEnhancerVideoNative::InitializeInner() +{ + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + detailEnhancer_ = DetailEnhancerVideo::Create(); + CHECK_AND_RETURN_RET_LOG(detailEnhancer_ != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, + "Create detail enhancement failed!"); + isInitialized_ = true; + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode DetailEnhancerVideoNative::DeinitializeInner() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + detailEnhancer_ = nullptr; + isInitialized_ = false; + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode DetailEnhancerVideoNative::RegisterCallback() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + auto callback = std::make_shared(shared_from_this()); + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, + "Create callback failed!"); + CHECK_AND_RETURN_RET_LOG(detailEnhancer_->RegisterCallback(callback) == VPE_ALGO_ERR_OK, + VIDEO_PROCESSING_ERROR_PROCESS_FAILED, "RegisterCallback failed!"); + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode DetailEnhancerVideoNative::SetSurface(const sptr& surface) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, + "surface is null!"); + BufferRequestConfig bufferRequestConfig = surface->GetWindowConfig(); + surface->SetRequestWidthAndHeight(bufferRequestConfig.width, bufferRequestConfig.height); + detailEnhancer_->SetOutputSurface(surface); + return VIDEO_PROCESSING_SUCCESS; +} + +sptr DetailEnhancerVideoNative::GetSurface() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), nullptr, "Initialization failed!"); + return detailEnhancer_->GetInputSurface(); +} + +VideoProcessing_ErrorCode DetailEnhancerVideoNative::SetParameter(const OHOS::Media::Format& parameter) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + int level; + CHECK_AND_RETURN_RET_LOG(parameter.GetIntValue(VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, level), + VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "No quality level!"); + int innerLevel = NDKLevelToInner(level); + CHECK_AND_RETURN_RET_LOG(innerLevel != -1, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "Quality level is invalid!"); + DetailEnhancerParameters param{}; + param.level = 
static_cast(innerLevel); + auto result = VideoProcessingUtils::InnerErrorToNDK(detailEnhancer_->SetParameter(param, VIDEO)); + if (result == VIDEO_PROCESSING_SUCCESS) { + level_ = level; + } + return result; +} + +VideoProcessing_ErrorCode DetailEnhancerVideoNative::GetParameter(OHOS::Media::Format& parameter) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + int level = level_.load(); + CHECK_AND_RETURN_RET_LOG(level != -1, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "No quality level set!"); + CHECK_AND_RETURN_RET_LOG(parameter.PutIntValue(VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, level), + VIDEO_PROCESSING_ERROR_PROCESS_FAILED, "Get parameter failed!"); + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode DetailEnhancerVideoNative::OnStart() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + return VideoProcessingUtils::InnerErrorToNDK(detailEnhancer_->Start()); +} + +VideoProcessing_ErrorCode DetailEnhancerVideoNative::OnStop() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + return VideoProcessingUtils::InnerErrorToNDK(detailEnhancer_->Stop()); +} + +VideoProcessing_ErrorCode DetailEnhancerVideoNative::OnRenderOutputBuffer(uint32_t index) +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + return VideoProcessingUtils::InnerErrorToNDK(detailEnhancer_->ReleaseOutputBuffer(index, true)); +} + +int DetailEnhancerVideoNative::NDKLevelToInner(int level) const +{ + auto it = LEVEL_MAP.find(level); + if (it == LEVEL_MAP.end()) [[unlikely]] { + VPE_LOGE("Invalid input level:%{public}d", level); + return -1; + } + return it->second; +} + +DetailEnhancerVideoNative::NativeCallback::NativeCallback(const std::shared_ptr& owner) + : owner_(owner) +{ +} + +void DetailEnhancerVideoNative::NativeCallback::OnError(VPEAlgoErrCode errorCode) +{ + SendCallback([this, &errorCode]() { + owner_->OnError(VideoProcessingUtils::InnerErrorToNDK(errorCode)); + }); +} + +void DetailEnhancerVideoNative::NativeCallback::OnState(VPEAlgoState state) +{ + SendCallback([this, &state]() { + owner_->OnState(VideoProcessingUtils::InnerStateToNDK(state)); + }); +} + +void DetailEnhancerVideoNative::NativeCallback::OnOutputBufferAvailable(uint32_t index, + [[maybe_unused]] DetailEnhBufferFlag flag) +{ + SendCallback([this, &index]() { + owner_->OnNewOutputBuffer(index); + }); +} + +void DetailEnhancerVideoNative::NativeCallback::SendCallback(std::function&& callback) const +{ + if (owner_ == nullptr) { + VPE_LOGE("owner is null!"); + return; + } + + callback(); +} diff --git a/framework/capi/video_processing/detail_enhancer/include/detail_enhancer_video_native.h b/framework/capi/video_processing/detail_enhancer/include/detail_enhancer_video_native.h new file mode 100644 index 0000000000000000000000000000000000000000..cc13251b53be44922b6e30d4329cc7081b197077 --- /dev/null +++ b/framework/capi/video_processing/detail_enhancer/include/detail_enhancer_video_native.h @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
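Editorial sketch (not part of the patch): DetailEnhancerVideoNative::NDKLevelToInner above resolves the NDK quality level through LEVEL_MAP and reports -1 for anything unknown, which SetParameter then rejects as an invalid parameter. A minimal standalone version of that lookup, with illustrative enum values in place of the real VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_* and DETAIL_ENH_LEVEL_* constants:

#include <cstdio>
#include <unordered_map>

// Illustrative stand-ins for the NDK-side and inner-side level constants.
enum NdkLevel { NDK_LEVEL_NONE = 0, NDK_LEVEL_LOW, NDK_LEVEL_MEDIUM, NDK_LEVEL_HIGH };
enum InnerLevel { INNER_NONE = 0, INNER_LOW, INNER_MEDIUM, INNER_HIGH };

static const std::unordered_map<int, int> LEVEL_MAP = {
    { NDK_LEVEL_NONE, INNER_NONE },
    { NDK_LEVEL_LOW, INNER_LOW },
    { NDK_LEVEL_MEDIUM, INNER_MEDIUM },
    { NDK_LEVEL_HIGH, INNER_HIGH },
};

// Returns the inner level, or -1 when the NDK level is not supported.
int NdkLevelToInner(int level)
{
    auto it = LEVEL_MAP.find(level);
    if (it == LEVEL_MAP.end()) {
        std::printf("Invalid input level:%d\n", level);
        return -1;
    }
    return it->second;
}

int main()
{
    std::printf("%d %d\n", NdkLevelToInner(NDK_LEVEL_HIGH), NdkLevelToInner(42));   // 3 -1
}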
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DETAIL_ENHANCER_VIDEO_NATIVE_H +#define DETAIL_ENHANCER_VIDEO_NATIVE_H + +#include +#include +#include + +#include "video_processing_native_template.h" + +#include "detail_enhancer_video.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Detail enhancer NDK interface implementaion. + */ +class DetailEnhancerVideoNative : public VideoProcessingNativeTemplate { +public: + DEFINE_WITH_DISALLOW_COPY_AND_MOVE(DetailEnhancerVideoNative); + + VideoProcessing_ErrorCode InitializeInner() override; + VideoProcessing_ErrorCode DeinitializeInner() override; + VideoProcessing_ErrorCode RegisterCallback() override; + VideoProcessing_ErrorCode SetSurface(const sptr& surface) override; + sptr GetSurface() override; + VideoProcessing_ErrorCode SetParameter(const OHOS::Media::Format& parameter) override; + VideoProcessing_ErrorCode GetParameter(OHOS::Media::Format& parameter) override; + VideoProcessing_ErrorCode OnStart() override; + VideoProcessing_ErrorCode OnStop() override; + VideoProcessing_ErrorCode OnRenderOutputBuffer(uint32_t index) override; + +private: + class NativeCallback : public DetailEnhancerVideoCallback { + public: + explicit NativeCallback(const std::shared_ptr& owner); + virtual ~NativeCallback() = default; + NativeCallback(const NativeCallback&) = delete; + NativeCallback& operator=(const NativeCallback&) = delete; + NativeCallback(NativeCallback&&) = delete; + NativeCallback& operator=(NativeCallback&&) = delete; + + void OnError(VPEAlgoErrCode errorCode) override; + void OnState(VPEAlgoState state) override; + void OnOutputBufferAvailable(uint32_t index, DetailEnhBufferFlag flag) override; + + private: + void SendCallback(std::function&& callback) const; + + const std::shared_ptr owner_{}; + }; + + int NDKLevelToInner(int level) const; + + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::shared_ptr detailEnhancer_{}; + // Guarded by lock_ end + std::atomic level_{-1}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // DETAIL_ENHANCER_VIDEO_NATIVE_H diff --git a/framework/capi/video_processing/include/video_environment_native.h b/framework/capi/video_processing/include/video_environment_native.h new file mode 100644 index 0000000000000000000000000000000000000000..0e75cb7b37216e361ed67e4d8e8d2be17340e04c --- /dev/null +++ b/framework/capi/video_processing/include/video_environment_native.h @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
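Editorial sketch (not part of the patch): InitializeInner/DeinitializeInner in the detail enhancer above (and in the other *Native classes) use the same guard: test the atomic isInitialized_ flag once without the lock for a cheap rejection, take lock_, test again, then create or drop the inner algorithm object. A standalone version of that check-lock-check sequence, with a plain int standing in for the algorithm instance:

#include <atomic>
#include <cstdio>
#include <memory>
#include <mutex>

class InitGuardSketch {
public:
    bool Initialize()
    {
        if (isInitialized_.load()) {           // cheap rejection without the lock: "Already init!"
            return false;
        }
        std::lock_guard<std::mutex> lock(lock_);
        if (isInitialized_.load()) {           // re-check: another thread may have won the race
            return false;
        }
        worker_ = std::make_shared<int>(42);   // stands in for DetailEnhancerVideo::Create()
        isInitialized_ = true;
        return true;
    }

    bool Deinitialize()
    {
        if (!isInitialized_.load()) {          // "Already deinit!"
            return false;
        }
        std::lock_guard<std::mutex> lock(lock_);
        if (!isInitialized_.load()) {
            return false;
        }
        worker_ = nullptr;
        isInitialized_ = false;
        return true;
    }

private:
    mutable std::mutex lock_{};
    std::atomic<bool> isInitialized_{false};
    std::shared_ptr<int> worker_{};
};

int main()
{
    InitGuardSketch guard;
    std::printf("%d %d %d\n", guard.Initialize(), guard.Initialize(), guard.Deinitialize());   // 1 0 1
}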
+ */ + +#ifndef VIDEO_ENVIRONMENT_NATIVE_H +#define VIDEO_ENVIRONMENT_NATIVE_H + +#include +#include + +#include "video_processing_types.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Implementation for video processing environment. + */ +class VideoEnvironmentNative { +public: + static VideoEnvironmentNative& Get(); + + VideoProcessing_ErrorCode Initialize(); + VideoProcessing_ErrorCode Deinitialize(); + VideoProcessing_ErrorCode InitializeByDefault(); + VideoProcessing_ErrorCode DeinitializeByDefault(); + +private: + VideoEnvironmentNative() = default; + virtual ~VideoEnvironmentNative() = default; + VideoEnvironmentNative(const VideoEnvironmentNative&) = delete; + VideoEnvironmentNative& operator=(const VideoEnvironmentNative&) = delete; + VideoEnvironmentNative(VideoEnvironmentNative&&) = delete; + VideoEnvironmentNative& operator=(VideoEnvironmentNative&&) = delete; + + VideoProcessing_ErrorCode InitializeLocked(); + VideoProcessing_ErrorCode DeinitializeLocked(); + VideoProcessing_ErrorCode InitializeEnvLocked(); + VideoProcessing_ErrorCode DeinitializeEnvLocked(); + + std::mutex lock_{}; + // Guarded by lock_ begin + bool isExplicitInit_{}; + uint32_t referenceCount_{}; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VIDEO_ENVIRONMENT_NATIVE_H diff --git a/framework/capi/video_processing/include/video_processing_callback_impl.h b/framework/capi/video_processing/include/video_processing_callback_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..84d875f64508c30869706ccf994c351777489790 --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_callback_impl.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_PROCESSING_CALLBACK_IMPL_H +#define VIDEO_PROCESSING_CALLBACK_IMPL_H + +#include + +#include "video_processing_callback_native.h" + +struct VideoProcessing_Callback { +public: + static VideoProcessing_ErrorCode Create(VideoProcessing_Callback** instance); + static VideoProcessing_ErrorCode Destroy(VideoProcessing_Callback* instance); + + std::shared_ptr GetInnerCallback() const; + +private: + VideoProcessing_Callback(); + ~VideoProcessing_Callback(); + + std::shared_ptr videoProcessingCallback_{}; +}; + +#endif // VIDEO_PROCESSING_CALLBACK_IMPL_H diff --git a/framework/capi/video_processing/include/video_processing_callback_native.h b/framework/capi/video_processing/include/video_processing_callback_native.h new file mode 100644 index 0000000000000000000000000000000000000000..8c591ccdcc0b70e91b2a4f2f501cae2f5ee40723 --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_callback_native.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
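Editorial sketch (not part of the patch): VideoProcessing_Callback above (and OH_VideoProcessing in video_processing_impl.h further down) are opaque C handles: the struct is only created through a static Create that null-checks its out-parameter and allocates with new(std::nothrow), and only released through the matching Destroy. A reduced standalone version of that handle pattern, with made-up Handle/Inner names:

#include <cstdio>
#include <memory>
#include <new>

enum class Err { Success, InvalidParameter, NoMemory };

struct Inner { int value = 0; };   // stands in for the inner VideoProcessingCallbackNative

// Opaque handle exposed to C callers; its lifetime is owned by Create/Destroy only.
struct Handle {
    static Err Create(Handle** instance)
    {
        if (instance == nullptr) {
            return Err::InvalidParameter;
        }
        *instance = new (std::nothrow) Handle();
        return *instance != nullptr ? Err::Success : Err::NoMemory;
    }

    static Err Destroy(Handle* instance)
    {
        if (instance == nullptr) {
            return Err::InvalidParameter;
        }
        delete instance;   // static members may call the private destructor
        return Err::Success;
    }

    std::shared_ptr<Inner> GetInner() const { return inner_; }

private:
    Handle() : inner_(std::make_shared<Inner>()) {}
    ~Handle() = default;

    std::shared_ptr<Inner> inner_;
};

int main()
{
    Handle* handle = nullptr;
    if (Handle::Create(&handle) == Err::Success) {
        std::printf("inner value: %d\n", handle->GetInner()->value);
        Handle::Destroy(handle);
    }
}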
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_PROCESSING_CALLBACK_NATIVE_H +#define VIDEO_PROCESSING_CALLBACK_NATIVE_H + +#include +#include +#include + +#include "video_processing_types.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Implementation for video processing callback NDK. + */ +class VideoProcessingCallbackNative { +public: + VideoProcessingCallbackNative() = default; + virtual ~VideoProcessingCallbackNative() = default; + VideoProcessingCallbackNative(const VideoProcessingCallbackNative&) = delete; + VideoProcessingCallbackNative& operator=(const VideoProcessingCallbackNative&) = delete; + VideoProcessingCallbackNative(VideoProcessingCallbackNative&&) = delete; + VideoProcessingCallbackNative& operator=(VideoProcessingCallbackNative&&) = delete; + + VideoProcessing_ErrorCode BindOnError(OH_VideoProcessingCallback_OnError onError); + VideoProcessing_ErrorCode BindOnState(OH_VideoProcessingCallback_OnState onState); + VideoProcessing_ErrorCode BindOnNewOutputBuffer(OH_VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer); + + bool IsValid() const; + bool IsModifiable() const; + void LockModifiers(); + void UnlockModifiers(); + bool HasOnNewOutputBuffer() const; + + virtual void OnError(OH_VideoProcessing* instance, VideoProcessing_ErrorCode errorCode, void* userData); + virtual void OnState(OH_VideoProcessing* instance, VideoProcessing_State state, void* userData); + virtual void OnNewOutputBuffer(OH_VideoProcessing* instance, uint32_t index, void* userData); + +protected: + VideoProcessing_ErrorCode BindFunction(std::function&& functionBinder); + + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isValid_{false}; + std::atomic isModifiable_{true}; + OH_VideoProcessingCallback_OnError onError_{}; + OH_VideoProcessingCallback_OnState onState_{}; + OH_VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer_{}; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VIDEO_PROCESSING_CALLBACK_NATIVE_H diff --git a/framework/capi/video_processing/include/video_processing_capi_capability.h b/framework/capi/video_processing/include/video_processing_capi_capability.h new file mode 100644 index 0000000000000000000000000000000000000000..a658327a0ee72976b6a129f302277822e180d23f --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_capi_capability.h @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
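Editorial sketch (not part of the patch): VideoProcessingCallbackNative above lets the user bind OnError/OnState/OnNewOutputBuffer only while the callback is still "modifiable"; LockModifiers is what the processing object calls once the callback is registered and running, after which BindFunction refuses further changes. A small standalone version of that bind-then-lock gate (the NDK function pointers are replaced by std::function for brevity):

#include <atomic>
#include <cstdio>
#include <functional>
#include <mutex>

class CallbackHolderSketch {
public:
    bool BindOnError(std::function<void(int)> onError)
    {
        if (onError == nullptr) {                  // "onError is null!"
            return false;
        }
        return BindFunction([this, &onError]() { onError_ = std::move(onError); });
    }

    void LockModifiers()                           // called once the callback is in use
    {
        std::lock_guard<std::mutex> lock(lock_);
        isModifiable_ = false;
    }

    void UnlockModifiers()
    {
        std::lock_guard<std::mutex> lock(lock_);
        isModifiable_ = true;
    }

    void OnError(int errorCode)
    {
        if (onError_ == nullptr) {
            return;
        }
        onError_(errorCode);
    }

private:
    bool BindFunction(std::function<void()>&& binder)
    {
        if (!isModifiable_.load()) {               // rejected after LockModifiers()
            return false;
        }
        std::lock_guard<std::mutex> lock(lock_);
        binder();
        return true;
    }

    mutable std::mutex lock_{};
    std::atomic<bool> isModifiable_{true};
    std::function<void(int)> onError_{};
};

int main()
{
    CallbackHolderSketch holder;
    holder.BindOnError([](int code) { std::printf("error %d\n", code); });
    holder.LockModifiers();
    bool rebound = holder.BindOnError([](int) {});  // refused: callback is locked
    holder.OnError(7);
    std::printf("rebound: %d\n", rebound);          // 0
}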
+ */ + +#ifndef VIDEO_PROCESSING_CAPI_CAPABILITY_H +#define VIDEO_PROCESSING_CAPI_CAPABILITY_H + +#include + +#include "v1_0/cm_color_space.h" +#include "native_buffer.h" +#include "surface_type.h" + +#include "video_processing_types.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +using namespace HDI::Display::Graphic::Common::V1_0; + +const std::unordered_map NATIVE_COLORSPACE_TO_CM_MAP = { + {OH_COLORSPACE_BT601_EBU_LIMIT, CM_BT601_EBU_LIMIT}, + {OH_COLORSPACE_BT601_SMPTE_C_LIMIT, CM_BT601_SMPTE_C_LIMIT}, + {OH_COLORSPACE_BT709_LIMIT, CM_BT709_LIMIT}, + {OH_COLORSPACE_BT2020_HLG_LIMIT, CM_BT2020_HLG_LIMIT}, + {OH_COLORSPACE_BT2020_PQ_LIMIT, CM_BT2020_PQ_LIMIT}, + {OH_COLORSPACE_BT601_EBU_FULL, CM_BT601_EBU_FULL}, + {OH_COLORSPACE_BT601_SMPTE_C_FULL, CM_BT601_SMPTE_C_FULL}, + {OH_COLORSPACE_BT709_FULL, CM_BT709_FULL}, + {OH_COLORSPACE_BT2020_HLG_FULL, CM_BT2020_HLG_FULL}, + {OH_COLORSPACE_BT2020_PQ_FULL, CM_BT2020_PQ_FULL}, +}; +const std::unordered_map NATIVE_METADATATYPE_TO_CM_MAP = { + {OH_VIDEO_HDR_HLG, CM_VIDEO_HLG}, + {OH_VIDEO_HDR_HDR10, CM_VIDEO_HDR10}, + {OH_VIDEO_HDR_VIVID, CM_VIDEO_HDR_VIVID}, +}; +const std::unordered_map NATIVE_FORMAT_TO_GRAPHIC_MAP = { + {NATIVEBUFFER_PIXEL_FMT_RGBA_8888, GRAPHIC_PIXEL_FMT_RGBA_8888}, + {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, GRAPHIC_PIXEL_FMT_YCBCR_420_SP}, + {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, GRAPHIC_PIXEL_FMT_YCRCB_420_SP}, + {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, GRAPHIC_PIXEL_FMT_RGBA_1010102}, + {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, GRAPHIC_PIXEL_FMT_YCBCR_P010}, + {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, GRAPHIC_PIXEL_FMT_YCRCB_P010}, +}; + +class VideoProcessingCapiCapability { +public: + static bool IsColorSpaceConversionSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo, + const VideoProcessing_ColorSpaceInfo* destinationVideoInfo); + static bool IsMetadataGenerationSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo); +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VIDEO_PROCESSING_CAPI_CAPABILITY_H \ No newline at end of file diff --git a/framework/capi/video_processing/include/video_processing_capi_impl.h b/framework/capi/video_processing/include/video_processing_capi_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..45014f1ff184d6221c4867150e744be81baa6dec --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_capi_impl.h @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef VIDEO_PROCESSING_CAPI_IMPL_H +#define VIDEO_PROCESSING_CAPI_IMPL_H + +#include "video_processing_capi_interface.h" +#include "algorithm_common.h" + +class VideoProcessingCapiImpl : public IVideoProcessingNdk { +public: + VideoProcessingCapiImpl() = default; + virtual ~VideoProcessingCapiImpl() = default; + VideoProcessingCapiImpl(const VideoProcessingCapiImpl&) = delete; + VideoProcessingCapiImpl& operator=(const VideoProcessingCapiImpl&) = delete; + VideoProcessingCapiImpl(VideoProcessingCapiImpl&&) = delete; + VideoProcessingCapiImpl& operator=(VideoProcessingCapiImpl&&) = delete; + + VideoProcessing_ErrorCode InitializeEnvironment() final; + VideoProcessing_ErrorCode DeinitializeEnvironment() final; + bool IsColorSpaceConversionSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo, + const VideoProcessing_ColorSpaceInfo* destinationVideoInfo) final; + bool IsMetadataGenerationSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo) final; + VideoProcessing_ErrorCode Create(OH_VideoProcessing** videoProcessor, int type) final; + VideoProcessing_ErrorCode Destroy(OH_VideoProcessing* videoProcessor) final; + VideoProcessing_ErrorCode RegisterCallback(OH_VideoProcessing* videoProcessor, + const VideoProcessing_Callback* callback, void* userData) final; + VideoProcessing_ErrorCode SetSurface(OH_VideoProcessing* videoProcessor, const OHNativeWindow* window) final; + VideoProcessing_ErrorCode GetSurface(OH_VideoProcessing* videoProcessor, OHNativeWindow** window) final; + VideoProcessing_ErrorCode SetParameter(OH_VideoProcessing* videoProcessor, + const OH_AVFormat* parameter) final; + VideoProcessing_ErrorCode GetParameter(OH_VideoProcessing* videoProcessor, OH_AVFormat* parameter) final; + VideoProcessing_ErrorCode Start(OH_VideoProcessing* videoProcessor) final; + VideoProcessing_ErrorCode Stop(OH_VideoProcessing* videoProcessor) final; + VideoProcessing_ErrorCode RenderOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index) final; + + VideoProcessing_ErrorCode Create(VideoProcessing_Callback** callback) final; + VideoProcessing_ErrorCode Destroy(VideoProcessing_Callback* callback) final; + VideoProcessing_ErrorCode BindOnError(VideoProcessing_Callback* callback, + OH_VideoProcessingCallback_OnError onError) final; + VideoProcessing_ErrorCode BindOnState(VideoProcessing_Callback* callback, + OH_VideoProcessingCallback_OnState onState) final; + VideoProcessing_ErrorCode BindOnNewOutputBuffer(VideoProcessing_Callback* callback, + OH_VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer) final; +private: + std::shared_ptr openglContext_ {nullptr}; + VideoProcessing_ErrorCode OpenGLInit(); +}; + +#endif // VIDEO_PROCESSING_CAPI_IMPL_H diff --git a/framework/capi/video_processing/include/video_processing_factory.h b/framework/capi/video_processing/include/video_processing_factory.h new file mode 100644 index 0000000000000000000000000000000000000000..4115c183d148189689ef4892354210d165cf74c6 --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_factory.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_PROCESSING_FACTORY_H +#define VIDEO_PROCESSING_FACTORY_H + +#include + +#include "video_processing_interface.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Factory class for creating video processing object. + */ +class VideoProcessingFactory { +public: + static bool IsValid(int type); + static std::shared_ptr CreateVideoProcessing(int type, OH_VideoProcessing* context); +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VIDEO_PROCESSING_FACTORY_H diff --git a/framework/capi/video_processing/include/video_processing_impl.h b/framework/capi/video_processing/include/video_processing_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..c155432f784bdd49d43c8ffe49d53555039712f3 --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_impl.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_PROCESSING_IMPL_H +#define VIDEO_PROCESSING_IMPL_H + +#include + +#include "video_processing_interface.h" +#include "algorithm_common.h" + +struct OH_VideoProcessing { +public: + static VideoProcessing_ErrorCode Create(OH_VideoProcessing** instance, int type, + std::shared_ptr openglContext); + static VideoProcessing_ErrorCode Destroy(OH_VideoProcessing* instance); + + std::shared_ptr GetVideoProcessing(); + +private: + OH_VideoProcessing(int type); + ~OH_VideoProcessing(); + + std::shared_ptr videoProcessing_{}; +}; + +#endif // VIDEO_PROCESSING_IMPL_H diff --git a/framework/capi/video_processing/include/video_processing_interface.h b/framework/capi/video_processing/include/video_processing_interface.h new file mode 100644 index 0000000000000000000000000000000000000000..c70cc2a6985876a3bddcf299a728e1cca6459590 --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_interface.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
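Editorial sketch (not part of the patch): VideoProcessingFactory above maps an integer processing type onto the matching IVideoProcessingNative implementation; the three *Native classes in this patch (color-space conversion, metadata generation, detail enhancement) are the natural candidates, although the factory's .cpp is not part of this excerpt. A generic standalone version of that type-keyed creation, with hypothetical type constants and product classes:

#include <cstdio>
#include <memory>

// Hypothetical type constants; the real ones live in video_processing_types.h.
constexpr int TYPE_COLOR_SPACE_CONVERSION = 1;
constexpr int TYPE_METADATA_GENERATION = 2;
constexpr int TYPE_DETAIL_ENHANCER = 4;

struct IProcessingSketch {
    virtual ~IProcessingSketch() = default;
    virtual const char* Name() const = 0;
};
struct ColorSpaceSketch : IProcessingSketch { const char* Name() const override { return "cscv"; } };
struct MetadataSketch : IProcessingSketch { const char* Name() const override { return "mdg"; } };
struct DetailEnhSketch : IProcessingSketch { const char* Name() const override { return "dh"; } };

class FactorySketch {
public:
    static bool IsValid(int type)
    {
        return type == TYPE_COLOR_SPACE_CONVERSION || type == TYPE_METADATA_GENERATION ||
            type == TYPE_DETAIL_ENHANCER;
    }

    static std::shared_ptr<IProcessingSketch> Create(int type)
    {
        switch (type) {
            case TYPE_COLOR_SPACE_CONVERSION: return std::make_shared<ColorSpaceSketch>();
            case TYPE_METADATA_GENERATION: return std::make_shared<MetadataSketch>();
            case TYPE_DETAIL_ENHANCER: return std::make_shared<DetailEnhSketch>();
            default: return nullptr;   // unknown type: the caller reports an invalid-parameter error
        }
    }
};

int main()
{
    auto processor = FactorySketch::Create(TYPE_DETAIL_ENHANCER);
    std::printf("%s %d\n", processor ? processor->Name() : "none", FactorySketch::IsValid(3));
}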
+ */ + +#ifndef VIDEO_PROCESSING_INTERFACE_H +#define VIDEO_PROCESSING_INTERFACE_H + +#include "video_processing_types.h" +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Interface for video processing. + */ +class IVideoProcessingNative { +public: + virtual VideoProcessing_ErrorCode Initialize() = 0; + virtual VideoProcessing_ErrorCode Deinitialize() = 0; + virtual VideoProcessing_ErrorCode RegisterCallback(const VideoProcessing_Callback* callback, void* userData) = 0; + virtual VideoProcessing_ErrorCode SetSurface(const OHNativeWindow* window) = 0; + virtual VideoProcessing_ErrorCode GetSurface(OHNativeWindow** window) = 0; + virtual VideoProcessing_ErrorCode SetParameter(const OH_AVFormat* parameter) = 0; + virtual VideoProcessing_ErrorCode GetParameter(OH_AVFormat* parameter) = 0; + virtual VideoProcessing_ErrorCode Start() = 0; + virtual VideoProcessing_ErrorCode Stop() = 0; + virtual VideoProcessing_ErrorCode RenderOutputBuffer(uint32_t index) = 0; + +protected: + IVideoProcessingNative() = default; + virtual ~IVideoProcessingNative() = default; + IVideoProcessingNative(const IVideoProcessingNative&) = delete; + IVideoProcessingNative& operator=(const IVideoProcessingNative&) = delete; + IVideoProcessingNative(IVideoProcessingNative&&) = delete; + IVideoProcessingNative& operator=(IVideoProcessingNative&&) = delete; + +public: + std::shared_ptr openglContext_ {nullptr}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VIDEO_PROCESSING_INTERFACE_H diff --git a/framework/capi/video_processing/include/video_processing_native_base.h b/framework/capi/video_processing/include/video_processing_native_base.h new file mode 100644 index 0000000000000000000000000000000000000000..472d983f798ed3a46f7e5c49597a904a77dae0cd --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_native_base.h @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_PROCESSING_NATIVE_BASE_H +#define VIDEO_PROCESSING_NATIVE_BASE_H + +#include +#include +#include +#include +#include +#include + +#include "common/native_mfmagic.h" +#include "surface.h" + +#include "video_processing_callback_native.h" +#include "video_processing_interface.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Base implementaion for video processing. 
+ */ +class VideoProcessingNativeBase : public IVideoProcessingNative { +public: + VideoProcessing_ErrorCode Initialize() final; + VideoProcessing_ErrorCode Deinitialize() final; + VideoProcessing_ErrorCode RegisterCallback(const VideoProcessing_Callback* callback, void* userData) final; + VideoProcessing_ErrorCode SetSurface(const OHNativeWindow* window) final; + VideoProcessing_ErrorCode GetSurface(OHNativeWindow** window) final; + VideoProcessing_ErrorCode SetParameter(const OH_AVFormat* parameter) final; + VideoProcessing_ErrorCode GetParameter(OH_AVFormat* parameter) final; + VideoProcessing_ErrorCode Start() final; + VideoProcessing_ErrorCode Stop() final; + VideoProcessing_ErrorCode RenderOutputBuffer(uint32_t index) final; + +protected: + explicit VideoProcessingNativeBase(OH_VideoProcessing* context); + virtual ~VideoProcessingNativeBase() = default; + VideoProcessingNativeBase(const VideoProcessingNativeBase&) = delete; + VideoProcessingNativeBase& operator=(const VideoProcessingNativeBase&) = delete; + VideoProcessingNativeBase(VideoProcessingNativeBase&&) = delete; + VideoProcessingNativeBase& operator=(VideoProcessingNativeBase&&) = delete; + + virtual VideoProcessing_ErrorCode InitializeInner(); + virtual VideoProcessing_ErrorCode DeinitializeInner(); + virtual VideoProcessing_ErrorCode RegisterCallback(); + virtual VideoProcessing_ErrorCode SetSurface(const sptr& surface, const OHNativeWindow& window); + virtual VideoProcessing_ErrorCode SetSurface(const sptr& surface); + virtual sptr GetSurface(); + virtual VideoProcessing_ErrorCode SetParameter(const OHOS::Media::Format& parameter); + virtual VideoProcessing_ErrorCode GetParameter(OHOS::Media::Format& parameter); + virtual VideoProcessing_ErrorCode OnStart(); + virtual VideoProcessing_ErrorCode OnStop(); + virtual VideoProcessing_ErrorCode OnRenderOutputBuffer(uint32_t index); + + // Called by child classes of features(Such as DetailEnhancerVideoNative) to send information to users' callbacks + void OnError(VideoProcessing_ErrorCode errorCode); + void OnState(VideoProcessing_State state); + void OnNewOutputBuffer(uint32_t index); + +private: + struct CallbackInfo { + std::shared_ptr callback{}; + void* userData{}; + bool operator<(const CallbackInfo& other) const + { + return (callback == other.callback) ? 
(userData < other.userData) : (callback < other.callback); + } + }; + + VideoProcessing_ErrorCode ExecuteWhenIdle(std::function&& task, + const std::string& errLog); + VideoProcessing_ErrorCode ExecuteWhenRunning(std::function&& task, + const std::string& errLog); + void OnCallback(std::function&, void*)>&& task, + const std::string& name); + void TraverseCallbacksLocked(std::function&, void*)>&& task); + void TraverseCallbacksExLocked(std::function&, void*)>&& task); + VideoProcessing_ErrorCode PrepareRegistrationLocked(); + + OH_VideoProcessing* context_{}; + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::atomic isRunning_{false}; + std::atomic hasInputSurface_{false}; + std::atomic hasOutputSurface_{false}; + // Guarded by lock_ end + std::atomic isOnNewOutputBuffer_{}; + mutable std::mutex callbackLock_{}; + // Guarded by callbackLock_ begin + std::atomic hasCallback_{false}; + std::atomic hasOnRenderOutputBuffer_{false}; + bool isInnerCallbackReady_{false}; + std::set callbacks_{}; + // Guarded by callbackLock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VIDEO_PROCESSING_NATIVE_BASE_H diff --git a/framework/capi/video_processing/include/video_processing_native_template.h b/framework/capi/video_processing/include/video_processing_native_template.h new file mode 100644 index 0000000000000000000000000000000000000000..6ca42971e95fd8c96f896c267358dacddc369e85 --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_native_template.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_PROCESSING_NATIVE_TEMPLATE_H +#define VIDEO_PROCESSING_NATIVE_TEMPLATE_H + +#include + +#include "nocopyable.h" +#include "video_processing_native_base.h" + +#define DEFINE_WITH_DISALLOW_COPY_AND_MOVE(className) \ + className([[maybe_unused]] Protected mask, OH_VideoProcessing* context) \ + : VideoProcessingNativeTemplate(context) {} \ + virtual ~className() = default; \ + DISALLOW_COPY_AND_MOVE(className) + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Base implementaion for video processing. 
+ */ +template +class VideoProcessingNativeTemplate : public VideoProcessingNativeBase, public std::enable_shared_from_this { +public: + static inline std::shared_ptr Create(OH_VideoProcessing* context) + { + return std::make_shared(Protected(), context); + } + +protected: + struct Protected { explicit Protected() = default; }; + + explicit VideoProcessingNativeTemplate(OH_VideoProcessing* context) : VideoProcessingNativeBase(context) {} + virtual ~VideoProcessingNativeTemplate() = default; + VideoProcessingNativeTemplate(const VideoProcessingNativeTemplate&) = delete; + VideoProcessingNativeTemplate& operator=(const VideoProcessingNativeTemplate&) = delete; + VideoProcessingNativeTemplate(VideoProcessingNativeTemplate&&) = delete; + VideoProcessingNativeTemplate& operator=(VideoProcessingNativeTemplate&&) = delete; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VIDEO_PROCESSING_NATIVE_TEMPLATE_H diff --git a/framework/capi/video_processing/include/video_processing_utils.h b/framework/capi/video_processing/include/video_processing_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..1d89f24afb2cc25d8d06e8e373159ab539dcc79f --- /dev/null +++ b/framework/capi/video_processing/include/video_processing_utils.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_PROCESSING_UTILS_H +#define VIDEO_PROCESSING_UTILS_H + +#include + +#include "algorithm_errors.h" +#include "algorithm_video_common.h" +#include "video_processing_types.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Utility funcions for video processing NDK. + */ +class VideoProcessingUtils { +public: + static VideoProcessing_ErrorCode InnerErrorToNDK(VPEAlgoErrCode errorCode); + static VideoProcessing_State InnerStateToNDK(VPEAlgoState state); + static std::string ToString(VideoProcessing_ErrorCode errorCode); + static std::string ToString(VideoProcessing_State state); +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VIDEO_PROCESSING_UTILS_H diff --git a/framework/capi/video_processing/metadata_generator/include/metadata_generator_video_native.h b/framework/capi/video_processing/metadata_generator/include/metadata_generator_video_native.h new file mode 100644 index 0000000000000000000000000000000000000000..2f29b8799bbd168cd838d409d7478606733e0264 --- /dev/null +++ b/framework/capi/video_processing/metadata_generator/include/metadata_generator_video_native.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
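Editorial sketch (not part of the patch): VideoProcessingNativeTemplate above combines three idioms: it is a class template over the concrete feature class, it inherits std::enable_shared_from_this so the feature can hand shared_from_this() to its NativeCallback, and it takes a Protected tag in the constructor so that only the static Create factory (wired up by DEFINE_WITH_DISALLOW_COPY_AND_MOVE) can construct instances while std::make_shared still works. A standalone version of the tag-plus-make_shared part, with a made-up Widget class:

#include <cstdio>
#include <memory>

template <typename T>
class CreatableSketch : public std::enable_shared_from_this<T> {
public:
    static std::shared_ptr<T> Create()
    {
        // make_shared needs a public constructor, so T's constructor stays public
        // but requires a tag that only this base class (and its children) can produce.
        return std::make_shared<T>(Protected());
    }

protected:
    struct Protected { explicit Protected() = default; };
    CreatableSketch() = default;
    virtual ~CreatableSketch() = default;
};

class Widget : public CreatableSketch<Widget> {
public:
    explicit Widget([[maybe_unused]] Protected tag) {}   // callable only with the tag

    void Hello() { std::printf("use_count=%ld\n", shared_from_this().use_count()); }
};

int main()
{
    auto widget = Widget::Create();   // constructing a Widget outside Create() will not compile
    widget->Hello();
}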
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef METADATA_GENERATOR_VIDEO_NATIVE_H +#define METADATA_GENERATOR_VIDEO_NATIVE_H + +#include +#include +#include + +#include "video_processing_native_template.h" +#include "metadata_generator_video.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Metadata generator NDK interface implementaion. + */ +class MetadataGeneratorVideoNative : public VideoProcessingNativeTemplate { +public: + DEFINE_WITH_DISALLOW_COPY_AND_MOVE(MetadataGeneratorVideoNative); + + VideoProcessing_ErrorCode InitializeInner() override; + VideoProcessing_ErrorCode DeinitializeInner() override; + VideoProcessing_ErrorCode RegisterCallback() override; + VideoProcessing_ErrorCode SetSurface(const sptr& surface) override; + sptr GetSurface() override; + VideoProcessing_ErrorCode SetParameter(const OHOS::Media::Format& parameter) override; + VideoProcessing_ErrorCode GetParameter(OHOS::Media::Format& parameter) override; + VideoProcessing_ErrorCode OnStart() override; + VideoProcessing_ErrorCode OnStop() override; + VideoProcessing_ErrorCode OnRenderOutputBuffer(uint32_t index) override; + +private: + class NativeCallback : public MetadataGeneratorVideoCallback { + public: + explicit NativeCallback(const std::shared_ptr& owner); + virtual ~NativeCallback() = default; + NativeCallback(const NativeCallback&) = delete; + NativeCallback& operator=(const NativeCallback&) = delete; + NativeCallback(NativeCallback&&) = delete; + NativeCallback& operator=(NativeCallback&&) = delete; + + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, MdgBufferFlag flag) override; + + private: + void SendCallback(std::function&& callback) const; + + const std::shared_ptr owner_{}; + }; + + mutable std::mutex lock_{}; + // Guarded by lock_ begin + std::atomic isInitialized_{false}; + std::shared_ptr metadataGenerator_{}; + // Guarded by lock_ end +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // METADATA_GENERATOR_VIDEO_NATIVE_H diff --git a/framework/capi/video_processing/metadata_generator/metadata_generator_video_native.cpp b/framework/capi/video_processing/metadata_generator/metadata_generator_video_native.cpp new file mode 100644 index 0000000000000000000000000000000000000000..17675b8c45f8bb94c107905b0d7833a807646016 --- /dev/null +++ b/framework/capi/video_processing/metadata_generator/metadata_generator_video_native.cpp @@ -0,0 +1,171 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
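Editorial sketch (not part of the patch): VideoProcessingUtils, declared a little earlier in video_processing_utils.h, centralizes the translation from the inner VPEAlgoErrCode/VPEAlgoState values to the NDK-facing VideoProcessing_ErrorCode/VideoProcessing_State enums, plus ToString helpers for logging; its .cpp is not part of this excerpt, so the sketch below only illustrates the usual switch-based shape with invented enum values:

#include <cstdio>
#include <string>

// Invented stand-ins for VPEAlgoErrCode and VideoProcessing_ErrorCode.
enum class AlgoErr { Ok, InvalidValue, Unknown };
enum class NdkErr { Success, InvalidParameter, ProcessFailed };

NdkErr InnerErrorToNdk(AlgoErr error)
{
    switch (error) {
        case AlgoErr::Ok: return NdkErr::Success;
        case AlgoErr::InvalidValue: return NdkErr::InvalidParameter;
        default: return NdkErr::ProcessFailed;   // unmapped codes collapse to a generic failure
    }
}

std::string ToString(NdkErr error)
{
    switch (error) {
        case NdkErr::Success: return "VIDEO_PROCESSING_SUCCESS";
        case NdkErr::InvalidParameter: return "VIDEO_PROCESSING_ERROR_INVALID_PARAMETER";
        default: return "VIDEO_PROCESSING_ERROR_PROCESS_FAILED";
    }
}

int main()
{
    std::printf("%s\n", ToString(InnerErrorToNdk(AlgoErr::InvalidValue)).c_str());
}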
+ */ + +#include +#include "video_processing_utils.h" +#include "metadata_generator_video_native.h" +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +VideoProcessing_ErrorCode MetadataGeneratorVideoNative::InitializeInner() +{ + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + metadataGenerator_ = MetadataGeneratorVideo::Create(openglContext_); + CHECK_AND_RETURN_RET_LOG(metadataGenerator_ != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, + "Create colorSpace converter failed!"); + isInitialized_ = true; + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode MetadataGeneratorVideoNative::DeinitializeInner() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + metadataGenerator_ = nullptr; + isInitialized_ = false; + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode MetadataGeneratorVideoNative::RegisterCallback() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + auto callback = std::make_shared(shared_from_this()); + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, + "Create callback failed!"); + CHECK_AND_RETURN_RET_LOG(metadataGenerator_->SetCallback(callback) == VPE_ALGO_ERR_OK, + VIDEO_PROCESSING_ERROR_PROCESS_FAILED, "RegisterCallback failed!"); + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode MetadataGeneratorVideoNative::SetSurface(const sptr& surface) +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, + "surface is null!"); + metadataGenerator_->SetOutputSurface(surface); + return VIDEO_PROCESSING_SUCCESS; +} + +sptr MetadataGeneratorVideoNative::GetSurface() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), nullptr, "Initialization failed!"); + return metadataGenerator_->CreateInputSurface(); +} + +VideoProcessing_ErrorCode MetadataGeneratorVideoNative::SetParameter(const OHOS::Media::Format& parameter) +{ + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; +} + +VideoProcessing_ErrorCode MetadataGeneratorVideoNative::GetParameter(OHOS::Media::Format& parameter) +{ + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; +} + +VideoProcessing_ErrorCode MetadataGeneratorVideoNative::OnStart() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + int32_t ret = 0; + ret = metadataGenerator_->Configure(); + if (ret != VPE_ALGO_ERR_OK) { + return VideoProcessingUtils::InnerErrorToNDK(static_cast(ret)); + } + ret = metadataGenerator_->Prepare(); + if (ret != VPE_ALGO_ERR_OK) { + return VideoProcessingUtils::InnerErrorToNDK(static_cast(ret)); + } + ret = metadataGenerator_->Start(); + if (ret != VPE_ALGO_ERR_OK) { + return VideoProcessingUtils::InnerErrorToNDK(static_cast(ret)); + 
} + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode MetadataGeneratorVideoNative::OnStop() +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + return VideoProcessingUtils::InnerErrorToNDK(static_cast(metadataGenerator_->Stop())); +} + +VideoProcessing_ErrorCode MetadataGeneratorVideoNative::OnRenderOutputBuffer(uint32_t index) +{ + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "Initialization failed!"); + + return VideoProcessingUtils::InnerErrorToNDK( + static_cast(metadataGenerator_->ReleaseOutputBuffer(index, true))); +} + +MetadataGeneratorVideoNative::NativeCallback::NativeCallback( + const std::shared_ptr& owner) + : owner_(owner) +{ +} + +void MetadataGeneratorVideoNative::NativeCallback::OnError(int32_t errorCode) +{ + SendCallback([this, &errorCode]() { + owner_->OnError(VideoProcessingUtils::InnerErrorToNDK(static_cast(errorCode))); + }); +} + +void MetadataGeneratorVideoNative::NativeCallback::OnState(int32_t state) +{ + SendCallback([this, &state]() { + owner_->OnState(VideoProcessingUtils::InnerStateToNDK(static_cast(state))); + }); +} + +void MetadataGeneratorVideoNative::NativeCallback::OnOutputBufferAvailable(uint32_t index, + [[maybe_unused]] MdgBufferFlag flag) +{ + SendCallback([this, &index]() { + owner_->OnNewOutputBuffer(index); + }); +} + +void MetadataGeneratorVideoNative::NativeCallback::SendCallback(std::function&& callback) const +{ + if (owner_ == nullptr) { + VPE_LOGE("owner is null!"); + return; + } + + callback(); +} diff --git a/framework/capi/video_processing/video_environment_native.cpp b/framework/capi/video_processing/video_environment_native.cpp new file mode 100644 index 0000000000000000000000000000000000000000..9dd32c0f21f02e8cdc15f4ee3abf94977b9e0599 --- /dev/null +++ b/framework/capi/video_processing/video_environment_native.cpp @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
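Editorial sketch (not part of the patch): MetadataGeneratorVideoNative::OnStart above is the one place in this patch where starting means driving a Configure/Prepare/Start sequence and translating the first inner failure into an NDK error. A standalone version of that "first failure wins" chaining, with a fake pipeline object:

#include <cstdio>

enum class AlgoErr { Ok, InvalidState };
enum class NdkErr { Success, ProcessFailed };

NdkErr InnerErrorToNdk(AlgoErr e) { return e == AlgoErr::Ok ? NdkErr::Success : NdkErr::ProcessFailed; }

// Fake pipeline standing in for MetadataGeneratorVideo.
struct PipelineSketch {
    AlgoErr Configure() { return AlgoErr::Ok; }
    AlgoErr Prepare() { return AlgoErr::Ok; }
    AlgoErr Start() { return AlgoErr::InvalidState; }   // pretend the last step fails
};

NdkErr OnStartSketch(PipelineSketch& pipeline)
{
    AlgoErr ret = pipeline.Configure();
    if (ret != AlgoErr::Ok) {
        return InnerErrorToNdk(ret);
    }
    ret = pipeline.Prepare();
    if (ret != AlgoErr::Ok) {
        return InnerErrorToNdk(ret);
    }
    ret = pipeline.Start();
    if (ret != AlgoErr::Ok) {
        return InnerErrorToNdk(ret);
    }
    return NdkErr::Success;
}

int main()
{
    PipelineSketch pipeline;
    std::printf("start result: %d\n", static_cast<int>(OnStartSketch(pipeline)));   // non-zero: failed
}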
+ */ + +#include "video_environment_native.h" + +#include "vpe_log.h" +#include "video_processing_client.h" +#include "video_processing_utils.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +VideoEnvironmentNative& VideoEnvironmentNative::Get() +{ + static VideoEnvironmentNative instance{}; + return instance; +} + +VideoProcessing_ErrorCode VideoEnvironmentNative::Initialize() +{ + std::lock_guard lock(lock_); + if (isExplicitInit_) [[unlikely]] { + VPE_LOGE("Repeated initialization of the video environment!"); + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + isExplicitInit_ = true; + return InitializeLocked(); +} + +VideoProcessing_ErrorCode VideoEnvironmentNative::Deinitialize() +{ + std::lock_guard lock(lock_); + if (!isExplicitInit_) [[unlikely]] { + VPE_LOGE("Repeated deinitialization of the video environment!"); + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + isExplicitInit_ = false; + return DeinitializeLocked(); +} + +VideoProcessing_ErrorCode VideoEnvironmentNative::InitializeByDefault() +{ + std::lock_guard lock(lock_); + return InitializeLocked(); +} + +VideoProcessing_ErrorCode VideoEnvironmentNative::DeinitializeByDefault() +{ + std::lock_guard lock(lock_); + return DeinitializeLocked(); +} + +VideoProcessing_ErrorCode VideoEnvironmentNative::InitializeLocked() +{ + if (referenceCount_ > 0) [[likely]] { + VPE_LOGD("already init(cnt:%{public}d)", referenceCount_); + referenceCount_++; + return VIDEO_PROCESSING_SUCCESS; + } + VPE_LOGD("start to initialize..."); + VideoProcessing_ErrorCode result = InitializeEnvLocked(); + VPE_LOGD("initialize ret:%{public}s", VideoProcessingUtils::ToString(result).c_str()); + if (result == VIDEO_PROCESSING_SUCCESS) [[likely]] { + referenceCount_++; + } + return result; +} + +VideoProcessing_ErrorCode VideoEnvironmentNative::DeinitializeLocked() +{ + if (referenceCount_ > 1) [[likely]] { + VPE_LOGD("environment is still in use(cnt:%{public}d)", referenceCount_); + referenceCount_--; + return VIDEO_PROCESSING_SUCCESS; + } + VPE_LOGD("start to deinitialize..."); + VideoProcessing_ErrorCode result = DeinitializeEnvLocked(); + VPE_LOGD("deinitialize ret:%{public}s", VideoProcessingUtils::ToString(result).c_str()); + if (result == VIDEO_PROCESSING_SUCCESS) [[likely]] { + if (referenceCount_ <= 0) { + VPE_LOGE("referenceCount_ is less than 1, no need to deinitialize"); + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + referenceCount_--; + } + return result; +} + +VideoProcessing_ErrorCode VideoEnvironmentNative::InitializeEnvLocked() +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Connect(); + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode VideoEnvironmentNative::DeinitializeEnvLocked() +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Disconnect(); + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/framework/capi/video_processing/video_processing_callback_impl.cpp b/framework/capi/video_processing/video_processing_callback_impl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..33bbe6daad22ca066ec02cca274fb0c97c68f4c3 --- /dev/null +++ b/framework/capi/video_processing/video_processing_callback_impl.cpp @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
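Editorial sketch (not part of the patch): VideoEnvironmentNative::InitializeLocked/DeinitializeLocked above implement a simple reference count around the service connection: only the 0-to-1 transition actually connects, only the last matching deinitialize disconnects, and the explicit Initialize/Deinitialize pair is tracked separately from the implicit per-instance use. A standalone version of that counting logic, with Connect/Disconnect reduced to log lines:

#include <cstdint>
#include <cstdio>
#include <mutex>

class EnvRefCountSketch {
public:
    bool Initialize()                 // returns true when the environment is usable
    {
        std::lock_guard<std::mutex> lock(lock_);
        if (referenceCount_ > 0) {    // already connected: just add a reference
            ++referenceCount_;
            return true;
        }
        std::printf("connect to service\n");   // stands in for VideoProcessingManager::Connect()
        ++referenceCount_;
        return true;
    }

    bool Deinitialize()
    {
        std::lock_guard<std::mutex> lock(lock_);
        if (referenceCount_ > 1) {    // still in use elsewhere: drop one reference
            --referenceCount_;
            return true;
        }
        if (referenceCount_ == 0) {   // unbalanced call, nothing to release
            return false;
        }
        std::printf("disconnect from service\n");
        --referenceCount_;
        return true;
    }

private:
    std::mutex lock_{};
    uint32_t referenceCount_{};
};

int main()
{
    EnvRefCountSketch env;
    env.Initialize();     // connects
    env.Initialize();     // count only
    env.Deinitialize();   // count only
    env.Deinitialize();   // disconnects
}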
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "video_processing_callback_impl.h" + +#include "vpe_log.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +VideoProcessing_ErrorCode VideoProcessing_Callback::Create(VideoProcessing_Callback** instance) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "instance is null!"); + + *instance = new(std::nothrow) VideoProcessing_Callback(); + CHECK_AND_RETURN_RET_LOG(*instance != nullptr, VIDEO_PROCESSING_ERROR_NO_MEMORY, "new callback out of memory!"); + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode VideoProcessing_Callback::Destroy(VideoProcessing_Callback* instance) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "instance is null!"); + auto innerCallback = instance->GetInnerCallback(); + if (innerCallback != nullptr && !innerCallback->IsModifiable()) { + VPE_LOGW("Callback is still in use, please stop before destroying it."); + return VIDEO_PROCESSING_ERROR_INVALID_INSTANCE; + } + delete instance; + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_Callback::VideoProcessing_Callback() +{ + videoProcessingCallback_ = std::make_shared<VideoProcessingCallbackNative>(); +} + +VideoProcessing_Callback::~VideoProcessing_Callback() +{ + videoProcessingCallback_ = nullptr; +} + +std::shared_ptr<VideoProcessingCallbackNative> VideoProcessing_Callback::GetInnerCallback() const +{ + return videoProcessingCallback_; +} diff --git a/framework/capi/video_processing/video_processing_callback_native.cpp b/framework/capi/video_processing/video_processing_callback_native.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2b47b48e44c75ab3e40dcd0e8e9d607f1e3ae20c --- /dev/null +++ b/framework/capi/video_processing/video_processing_callback_native.cpp @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ */ + +#include "video_processing_callback_native.h" + +#include "vpe_log.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +VideoProcessing_ErrorCode VideoProcessingCallbackNative::BindOnError(OH_VideoProcessingCallback_OnError onError) +{ + CHECK_AND_RETURN_RET_LOG(onError != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "onError is null!"); + return BindFunction([this, &onError]() { onError_ = onError; }); +} + +VideoProcessing_ErrorCode VideoProcessingCallbackNative::BindOnState(OH_VideoProcessingCallback_OnState onState) +{ + CHECK_AND_RETURN_RET_LOG(onState != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "onState is null!"); + return BindFunction([this, &onState]() { onState_ = onState; }); +} + +VideoProcessing_ErrorCode VideoProcessingCallbackNative::BindOnNewOutputBuffer( + OH_VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer) +{ + CHECK_AND_RETURN_RET_LOG(onNewOutputBuffer != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, + "onNewOutputBuffer is null!"); + return BindFunction([this, &onNewOutputBuffer]() { onNewOutputBuffer_ = onNewOutputBuffer; }); +} + +bool VideoProcessingCallbackNative::IsValid() const +{ + return isValid_.load(); +} + +bool VideoProcessingCallbackNative::IsModifiable() const +{ + return isModifiable_.load(); +} + +void VideoProcessingCallbackNative::LockModifiers() +{ + std::lock_guard lock(lock_); + isModifiable_ = false; +} + +void VideoProcessingCallbackNative::UnlockModifiers() +{ + std::lock_guard lock(lock_); + isModifiable_ = true; +} + +bool VideoProcessingCallbackNative::HasOnNewOutputBuffer() const +{ + std::lock_guard lock(lock_); + return onNewOutputBuffer_ != nullptr; +} + +void VideoProcessingCallbackNative::OnError(OH_VideoProcessing* instance, VideoProcessing_ErrorCode errorCode, + void* userData) +{ + if (onError_ == nullptr) { + VPE_LOGD("onError_ is null!"); + return; + } + onError_(instance, errorCode, userData); +} + +void VideoProcessingCallbackNative::OnState(OH_VideoProcessing* instance, VideoProcessing_State state, void* userData) +{ + if (onState_ == nullptr) { + VPE_LOGD("onState_ is null!"); + return; + } + onState_(instance, state, userData); +} + +void VideoProcessingCallbackNative::OnNewOutputBuffer(OH_VideoProcessing* instance, uint32_t index, void* userData) +{ + if (onNewOutputBuffer_ == nullptr) { + VPE_LOGD("onNewOutputBuffer_ is null!"); + return; + } + onNewOutputBuffer_(instance, index, userData); +} + +VideoProcessing_ErrorCode VideoProcessingCallbackNative::BindFunction(std::function&& functionBinder) +{ + if (!isModifiable_.load()) { + return VIDEO_PROCESSING_ERROR_PROCESS_FAILED; + } + std::lock_guard lock(lock_); + functionBinder(); + if (!isValid_.load()) { + isValid_ = true; + } + return VIDEO_PROCESSING_SUCCESS; +} diff --git a/framework/capi/video_processing/video_processing_capi_capability.cpp b/framework/capi/video_processing/video_processing_capi_capability.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f61649895c1c5ce8de286cf244e4044066b13dfe --- /dev/null +++ b/framework/capi/video_processing/video_processing_capi_capability.cpp @@ -0,0 +1,361 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "video_processing_capi_capability.h"
+
+#include <cstdint>
+#include <functional>
+#include <unordered_map>
+#include <unordered_set>
+
+#include "vpe_log.h"
+
+using namespace OHOS::Media::VideoProcessingEngine;
+using namespace OHOS::HDI::Display::Graphic::Common::V1_0;
+
+struct MetaSupportKey {
+    int32_t metadata;
+    int32_t colorSpace;
+    int32_t format;
+
+    bool operator<(const MetaSupportKey& k) const
+    {
+        return metadata < k.metadata ||
+            (metadata == k.metadata && colorSpace < k.colorSpace) ||
+            (metadata == k.metadata && colorSpace == k.colorSpace && format < k.format);
+    }
+
+    bool operator==(const MetaSupportKey& k) const
+    {
+        return metadata == k.metadata && colorSpace == k.colorSpace && format == k.format;
+    }
+};
+template<> struct std::hash<MetaSupportKey> {
+    std::size_t operator()(const MetaSupportKey& k) const
+    {
+        return std::hash<int32_t>()(k.metadata) ^ std::hash<int32_t>()(k.colorSpace) ^
+            std::hash<int32_t>()(k.format);
+    }
+};
+
+struct CscvSupportKey {
+    int32_t metadataIn;
+    int32_t colorSpaceIn;
+    int32_t formatIn;
+    int32_t metadataOut;
+    int32_t colorSpaceOut;
+    int32_t formatOut;
+
+    bool operator<(const CscvSupportKey& k) const
+    {
+        return metadataIn < k.metadataIn ||
+            (metadataIn == k.metadataIn && colorSpaceIn < k.colorSpaceIn) ||
+            (metadataIn == k.metadataIn && colorSpaceIn == k.colorSpaceIn && formatIn < k.formatIn) ||
+            (metadataIn == k.metadataIn && colorSpaceIn == k.colorSpaceIn && formatIn == k.formatIn &&
+                metadataOut < k.metadataOut) ||
+            (metadataIn == k.metadataIn && colorSpaceIn == k.colorSpaceIn && formatIn == k.formatIn &&
+                metadataOut == k.metadataOut && colorSpaceOut < k.colorSpaceOut) ||
+            (metadataIn == k.metadataIn && colorSpaceIn == k.colorSpaceIn && formatIn == k.formatIn &&
+                metadataOut == k.metadataOut && colorSpaceOut == k.colorSpaceOut && formatOut < k.formatOut);
+    }
+
+    bool operator==(const CscvSupportKey& k) const
+    {
+        return metadataIn == k.metadataIn && colorSpaceIn == k.colorSpaceIn && formatIn == k.formatIn &&
+            metadataOut == k.metadataOut && colorSpaceOut == k.colorSpaceOut && formatOut == k.formatOut;
+    }
+};
+template<> struct std::hash<CscvSupportKey> {
+    std::size_t operator()(const CscvSupportKey& k) const
+    {
+        return std::hash<int32_t>()(k.metadataIn) ^ std::hash<int32_t>()(k.colorSpaceIn) ^
+            std::hash<int32_t>()(k.formatIn) ^ std::hash<int32_t>()(k.metadataOut) ^
+            std::hash<int32_t>()(k.colorSpaceOut) ^ std::hash<int32_t>()(k.formatOut);
+    }
+};
+
+namespace {
+const std::unordered_set<MetaSupportKey> VIDEO_META_SUPPORT_MAP = {
+    { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 },
+    { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 },
+    { CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 },
+    { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 },
+    { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 },
+    { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 },
+    { CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 },
+    { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 },
+    { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 },
+    { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT,
GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, +}; +const std::unordered_set VIDEO_COLORSPACE_CONVERTER_SUPPORT_SDRTOSDR_MAP = { + { CM_METADATA_NONE, CM_BT601_EBU_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_METADATA_NONE, CM_BT601_EBU_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_METADATA_NONE, CM_BT601_EBU_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_METADATA_NONE, CM_BT601_EBU_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_METADATA_NONE, CM_BT601_EBU_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_METADATA_NONE, CM_BT601_EBU_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_METADATA_NONE, CM_BT601_EBU_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_METADATA_NONE, CM_BT601_EBU_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_METADATA_NONE, CM_BT601_EBU_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, +}; +const std::unordered_set VIDEO_COLORSPACE_CONVERTER_SUPPORT_HDRTOSDR_MAP = { + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_VIDEO_HDR_VIVID, 
CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_420_SP }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_METADATA_NONE, CM_BT709_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_8888 }, +}; +const std::unordered_set VIDEO_COLORSPACE_CONVERTER_SUPPORT_HDRTOHDR_MAP = { + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR_VIVID, 
CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010, + CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCBCR_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_YCRCB_P010 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102, + CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 }, + { CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, 
GRAPHIC_PIXEL_FMT_RGBA_1010102,
+        CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 },
+    { CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102,
+        CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, GRAPHIC_PIXEL_FMT_RGBA_1010102 },
+};
+}
+
+CscvSupportKey VideoColorSpaceInfoToCscvKey(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo,
+    const VideoProcessing_ColorSpaceInfo* destinationVideoInfo)
+{
+    int32_t metadataValeSrc = static_cast<int32_t>(CM_METADATA_NONE);
+    int32_t colorSpaceValeSrc = static_cast<int32_t>(OH_COLORSPACE_NONE);
+    int32_t formatValeSrc = static_cast<int32_t>(NATIVEBUFFER_PIXEL_FMT_BUTT);
+    int32_t metadataValeDst = static_cast<int32_t>(CM_METADATA_NONE);
+    int32_t colorSpaceValeDst = static_cast<int32_t>(OH_COLORSPACE_NONE);
+    int32_t formatValeDst = static_cast<int32_t>(NATIVEBUFFER_PIXEL_FMT_BUTT);
+    OH_NativeBuffer_Format formatIn = static_cast<OH_NativeBuffer_Format>(sourceVideoInfo->pixelFormat);
+    auto itInFormat = NATIVE_FORMAT_TO_GRAPHIC_MAP.find(formatIn);
+    if (itInFormat != NATIVE_FORMAT_TO_GRAPHIC_MAP.end()) {
+        formatValeSrc = static_cast<int32_t>(itInFormat->second);
+    }
+    OH_NativeBuffer_Format formatOut = static_cast<OH_NativeBuffer_Format>(destinationVideoInfo->pixelFormat);
+    auto itOutFormat = NATIVE_FORMAT_TO_GRAPHIC_MAP.find(formatOut);
+    if (itOutFormat != NATIVE_FORMAT_TO_GRAPHIC_MAP.end()) {
+        formatValeDst = static_cast<int32_t>(itOutFormat->second);
+    }
+    OH_NativeBuffer_ColorSpace colorIn = static_cast<OH_NativeBuffer_ColorSpace>(sourceVideoInfo->colorSpace);
+    auto itInColorSpace = NATIVE_COLORSPACE_TO_CM_MAP.find(colorIn);
+    if (itInColorSpace != NATIVE_COLORSPACE_TO_CM_MAP.end()) {
+        colorSpaceValeSrc = static_cast<int32_t>(itInColorSpace->second);
+    }
+    OH_NativeBuffer_ColorSpace colorOut = static_cast<OH_NativeBuffer_ColorSpace>(destinationVideoInfo->colorSpace);
+    auto itOutColorSpace = NATIVE_COLORSPACE_TO_CM_MAP.find(colorOut);
+    if (itOutColorSpace != NATIVE_COLORSPACE_TO_CM_MAP.end()) {
+        colorSpaceValeDst = static_cast<int32_t>(itOutColorSpace->second);
+    }
+    OH_NativeBuffer_MetadataType metaIn = static_cast<OH_NativeBuffer_MetadataType>(sourceVideoInfo->metadataType);
+    auto itInMetadata = NATIVE_METADATATYPE_TO_CM_MAP.find(metaIn);
+    if (itInMetadata != NATIVE_METADATATYPE_TO_CM_MAP.end()) {
+        metadataValeSrc = static_cast<int32_t>(itInMetadata->second);
+    }
+    OH_NativeBuffer_MetadataType metaOut = static_cast<OH_NativeBuffer_MetadataType>(
+        destinationVideoInfo->metadataType);
+    auto itOutMetadata = NATIVE_METADATATYPE_TO_CM_MAP.find(metaOut);
+    if (itOutMetadata != NATIVE_METADATATYPE_TO_CM_MAP.end()) {
+        metadataValeDst = static_cast<int32_t>(itOutMetadata->second);
+    }
+    CscvSupportKey keyReturn{ metadataValeSrc, colorSpaceValeSrc, formatValeSrc,
+        metadataValeDst, colorSpaceValeDst, formatValeDst };
+    return keyReturn;
+}
+bool VideoProcessingCapiCapability::IsColorSpaceConversionSupported(
+    const VideoProcessing_ColorSpaceInfo* sourceVideoInfo,
+    const VideoProcessing_ColorSpaceInfo* destinationVideoInfo)
+{
+    CHECK_AND_RETURN_RET_LOG(sourceVideoInfo != nullptr, false, "sourceVideoInfo is null!");
+    CHECK_AND_RETURN_RET_LOG(destinationVideoInfo != nullptr, false, "destinationVideoInfo is null!");
+    CscvSupportKey keySource = VideoColorSpaceInfoToCscvKey(sourceVideoInfo, destinationVideoInfo);
+    auto it = VIDEO_COLORSPACE_CONVERTER_SUPPORT_SDRTOSDR_MAP.find(keySource);
+    if (it != VIDEO_COLORSPACE_CONVERTER_SUPPORT_SDRTOSDR_MAP.end()) {
+        return true;
+    }
+    it = VIDEO_COLORSPACE_CONVERTER_SUPPORT_HDRTOSDR_MAP.find(keySource);
+    if (it != VIDEO_COLORSPACE_CONVERTER_SUPPORT_HDRTOSDR_MAP.end()) {
+        return true;
+    }
+    it = VIDEO_COLORSPACE_CONVERTER_SUPPORT_HDRTOHDR_MAP.find(keySource);
+    if (it != VIDEO_COLORSPACE_CONVERTER_SUPPORT_HDRTOHDR_MAP.end()) {
+        return
true; + } + VPE_LOGE("IsColorSpaceConversionSupported false (FormatIn:%{public}d, ColorIn:%{public}d, MetaIn:%{public}d, \ + FormatOut:%{public}d, ColorOut:%{public}d, MetaOut:%{public}d) !", + sourceVideoInfo->pixelFormat, sourceVideoInfo->colorSpace, sourceVideoInfo->metadataType, + destinationVideoInfo->pixelFormat, destinationVideoInfo->colorSpace, destinationVideoInfo->metadataType); + return false; +} + +MetaSupportKey VideoColorSpaceInfoToMetaKey(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo) +{ + int32_t metadataVale = static_cast(CM_METADATA_NONE); + int32_t colorSpaceVale = static_cast(OH_COLORSPACE_NONE); + int32_t formatVale = static_cast(NATIVEBUFFER_PIXEL_FMT_BUTT); + OH_NativeBuffer_Format formatIn = static_cast(sourceVideoInfo->pixelFormat); + auto itInFormat = NATIVE_FORMAT_TO_GRAPHIC_MAP.find(formatIn); + if (itInFormat != NATIVE_FORMAT_TO_GRAPHIC_MAP.end()) { + formatVale = static_cast(itInFormat->second); + } + OH_NativeBuffer_ColorSpace colorIn = static_cast(sourceVideoInfo->colorSpace); + auto itInColorSpace = NATIVE_COLORSPACE_TO_CM_MAP.find(colorIn); + if (itInColorSpace != NATIVE_COLORSPACE_TO_CM_MAP.end()) { + colorSpaceVale = static_cast(itInColorSpace->second); + } + OH_NativeBuffer_MetadataType metaIn = static_cast(sourceVideoInfo->metadataType); + auto itInMetadata = NATIVE_METADATATYPE_TO_CM_MAP.find(metaIn); + if (itInMetadata != NATIVE_METADATATYPE_TO_CM_MAP.end()) { + metadataVale = static_cast(itInMetadata->second); + } + MetaSupportKey keyReturn{ metadataVale, colorSpaceVale, formatVale }; + return keyReturn; +} +bool VideoProcessingCapiCapability::IsMetadataGenerationSupported( + const VideoProcessing_ColorSpaceInfo* sourceVideoInfo) +{ + CHECK_AND_RETURN_RET_LOG(sourceVideoInfo != nullptr, false, "sourceVideoInfo is null!"); + MetaSupportKey keySource = VideoColorSpaceInfoToMetaKey(sourceVideoInfo); + auto it = VIDEO_META_SUPPORT_MAP.find(keySource); + if (it != VIDEO_META_SUPPORT_MAP.end()) { + return true; + } + VPE_LOGE("IsMetadataGenerationSupported false (Format:%{public}d, Color:%{public}d, Meta:%{public}d) !", + sourceVideoInfo->pixelFormat, sourceVideoInfo->colorSpace, sourceVideoInfo->metadataType); + return false; +} diff --git a/framework/capi/video_processing/video_processing_capi_impl.cpp b/framework/capi/video_processing/video_processing_capi_impl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8d5be624f3b9a3908bd083194facba1863fa2f0e --- /dev/null +++ b/framework/capi/video_processing/video_processing_capi_impl.cpp @@ -0,0 +1,205 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "video_processing_capi_impl.h" +#include "video_processing_capi_capability.h" +#include "vpe_log.h" +#include "video_environment_native.h" +#include "video_processing_callback_impl.h" +#include "video_processing_impl.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +const int32_t VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION = 0x1; +const int32_t VIDEO_PROCESSING_TYPE_METADATA_GENERATION = 0x2; +const int32_t VIDEO_PROCESSING_TYPE_DETAIL_ENHANCER = 0x4; +const char* VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL = "QualityLevel"; + +namespace { +// Call video processing interface +VideoProcessing_ErrorCode CallVideoProcessing(OH_VideoProcessing* videoProcessor, + std::function&)>&& operation) +{ + CHECK_AND_RETURN_RET_LOG(videoProcessor != nullptr, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE, + "videoProcessor is null!"); + auto videoProcessing = videoProcessor->GetVideoProcessing(); + CHECK_AND_RETURN_RET_LOG(videoProcessing != nullptr, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE, + "videoProcessor is invalid!"); + return operation(videoProcessing); +} +// Call video processing callback interface +VideoProcessing_ErrorCode CallVideoProcessingCallback(VideoProcessing_Callback* callback, + std::function&)>&& operation) +{ + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "callback is null!"); + auto innerCallback = callback->GetInnerCallback(); + CHECK_AND_RETURN_RET_LOG(innerCallback != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, + "callback is invalid!"); + return operation(innerCallback); +} +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::OpenGLInit() +{ + auto status = SetupOpengl(openglContext_); + CHECK_AND_RETURN_RET_LOG(status == static_cast(VIDEO_PROCESSING_SUCCESS), + VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "OpenGLInit SetupOpengl fail!"); + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::InitializeEnvironment() +{ + CHECK_AND_RETURN_RET_LOG(OpenGLInit() == VIDEO_PROCESSING_SUCCESS, VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, + "OpenGLInit failed!"); + return VideoEnvironmentNative::Get().Initialize(); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::DeinitializeEnvironment() +{ + return VideoEnvironmentNative::Get().Deinitialize(); +} + +bool VideoProcessingCapiImpl::IsColorSpaceConversionSupported( + const VideoProcessing_ColorSpaceInfo* sourceVideoInfo, + const VideoProcessing_ColorSpaceInfo* destinationVideoInfo) +{ + return VideoProcessingCapiCapability::IsColorSpaceConversionSupported(sourceVideoInfo, destinationVideoInfo); +} + +bool VideoProcessingCapiImpl::IsMetadataGenerationSupported( + const VideoProcessing_ColorSpaceInfo* sourceVideoInfo) +{ + return VideoProcessingCapiCapability::IsMetadataGenerationSupported(sourceVideoInfo); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::Create(OH_VideoProcessing** videoProcessor, int type) +{ + return OH_VideoProcessing::Create(videoProcessor, type, openglContext_); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::Destroy(OH_VideoProcessing* videoProcessor) +{ + return OH_VideoProcessing::Destroy(videoProcessor); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::RegisterCallback(OH_VideoProcessing* videoProcessor, + const VideoProcessing_Callback* callback, void* userData) +{ + return CallVideoProcessing(videoProcessor, [&callback, &userData](std::shared_ptr& obj) { + return obj->RegisterCallback(callback, userData); + }); +} + +VideoProcessing_ErrorCode 
VideoProcessingCapiImpl::SetSurface(OH_VideoProcessing* videoProcessor, + const OHNativeWindow* window) +{ + return CallVideoProcessing(videoProcessor, [&window](std::shared_ptr& obj) { + return obj->SetSurface(window); + }); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::GetSurface(OH_VideoProcessing* videoProcessor, + OHNativeWindow** window) +{ + return CallVideoProcessing(videoProcessor, [&window](std::shared_ptr& obj) { + return obj->GetSurface(window); + }); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::SetParameter(OH_VideoProcessing* videoProcessor, + const OH_AVFormat* parameter) +{ + return CallVideoProcessing(videoProcessor, [¶meter](std::shared_ptr& obj) { + return obj->SetParameter(parameter); + }); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::GetParameter(OH_VideoProcessing* videoProcessor, + OH_AVFormat* parameter) +{ + return CallVideoProcessing(videoProcessor, [¶meter](std::shared_ptr& obj) { + return obj->GetParameter(parameter); + }); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::Start(OH_VideoProcessing* videoProcessor) +{ + return CallVideoProcessing(videoProcessor, [](std::shared_ptr& obj) { + return obj->Start(); + }); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::Stop(OH_VideoProcessing* videoProcessor) +{ + return CallVideoProcessing(videoProcessor, [](std::shared_ptr& obj) { + return obj->Stop(); + }); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::RenderOutputBuffer(OH_VideoProcessing* videoProcessor, + uint32_t index) +{ + return CallVideoProcessing(videoProcessor, [&index](std::shared_ptr& obj) { + return obj->RenderOutputBuffer(index); + }); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::Create(VideoProcessing_Callback** callback) +{ + return VideoProcessing_Callback::Create(callback); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::Destroy(VideoProcessing_Callback* callback) +{ + return VideoProcessing_Callback::Destroy(callback); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::BindOnError(VideoProcessing_Callback* callback, + OH_VideoProcessingCallback_OnError onError) +{ + return CallVideoProcessingCallback(callback, [&onError](std::shared_ptr& obj) { + return obj->BindOnError(onError); + }); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::BindOnState(VideoProcessing_Callback* callback, + OH_VideoProcessingCallback_OnState onState) +{ + return CallVideoProcessingCallback(callback, [&onState](std::shared_ptr& obj) { + return obj->BindOnState(onState); + }); +} + +VideoProcessing_ErrorCode VideoProcessingCapiImpl::BindOnNewOutputBuffer(VideoProcessing_Callback* callback, + OH_VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer) +{ + return CallVideoProcessingCallback(callback, + [&onNewOutputBuffer](std::shared_ptr& obj) { + return obj->BindOnNewOutputBuffer(onNewOutputBuffer); + }); +} + +IVideoProcessingNdk* CreateVideoProcessingNdk() +{ + return new(std::nothrow) VideoProcessingCapiImpl(); +} + +void DestroyVideoProcessingNdk(IVideoProcessingNdk* obj) +{ + CHECK_AND_RETURN_LOG(obj != nullptr, "VPE video processing is null!"); + VideoProcessingCapiImpl* impl = static_cast(obj); + delete impl; +} diff --git a/framework/capi/video_processing/video_processing_factory.cpp b/framework/capi/video_processing/video_processing_factory.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c1f90f4564a21d4bbb2b65d1f599dca818ab01cb --- /dev/null +++ b/framework/capi/video_processing/video_processing_factory.cpp @@ -0,0 +1,67 @@ +/* + * 
Copyright (c) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "video_processing_factory.h"
+
+#include <functional>
+#include <unordered_map>
+
+#include "vpe_log.h"
+#include "video_processing_native_template.h"
+// NOTE: Add VPE feature header files like below.
+// VPE feature header begin
+#include "detail_enhancer_video_native.h"
+#include "colorSpace_converter_video_native.h"
+#include "metadata_generator_video_native.h"
+// VPE feature header end
+
+using namespace OHOS::Media::VideoProcessingEngine;
+
+namespace {
+template <typename T>
+std::shared_ptr<VideoProcessingNativeBase> Create(OH_VideoProcessing* context)
+{
+    return VideoProcessingNativeTemplate<T>::Create(context);
+}
+// NOTE: Add VPE feature type like below.
+// VPE feature map begin
+const std::unordered_map<int, std::function<std::shared_ptr<VideoProcessingNativeBase>(OH_VideoProcessing*)>>
+    CREATORS = {
+    { VIDEO_PROCESSING_TYPE_DETAIL_ENHANCER, Create<DetailEnhancerVideoNative> },
+    { VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION, Create<ColorSpaceConverterVideoNative> },
+    { VIDEO_PROCESSING_TYPE_METADATA_GENERATION, Create<MetadataGeneratorVideoNative> },
+    // ...
+};
+// VPE feature map end
+}
+
+bool VideoProcessingFactory::IsValid(int type)
+{
+    return CREATORS.find(type) != CREATORS.end();
+}
+
+std::shared_ptr<VideoProcessingNativeBase> VideoProcessingFactory::CreateVideoProcessing(int type,
+    OH_VideoProcessing* context)
+{
+    if (context == nullptr) [[unlikely]] {
+        VPE_LOGE("Invalid input: context is null!");
+        return nullptr;
+    }
+    auto it = CREATORS.find(type);
+    if (it == CREATORS.end()) {
+        VPE_LOGE("Unknown type:%{public}d!", type);
+        return nullptr;
+    }
+    return it->second(context);
+}
diff --git a/framework/capi/video_processing/video_processing_impl.cpp b/framework/capi/video_processing/video_processing_impl.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..1d3bd826010b3886a0a339c94bd4226e9c96c788
--- /dev/null
+++ b/framework/capi/video_processing/video_processing_impl.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#include "video_processing_impl.h" + +#include "video_processing_factory.h" +#include "vpe_log.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +VideoProcessing_ErrorCode OH_VideoProcessing::Create(OH_VideoProcessing** instance, int type, + std::shared_ptr openglContext) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr && *instance == nullptr, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE, + "VPE video processing instance is null or *instance is not null!"); + CHECK_AND_RETURN_RET_LOG(VideoProcessingFactory::IsValid(type), VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, + "VPE video processing type(%{public}d) is invalid!", type); + + *instance = new(std::nothrow) OH_VideoProcessing(type); + CHECK_AND_RETURN_RET_LOG(*instance != nullptr, VIDEO_PROCESSING_ERROR_NO_MEMORY, + "VPE video processing out of memory!"); + auto obj = (*instance)->GetVideoProcessing(); + CHECK_AND_RETURN_RET_LOG(obj != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, + "VPE video processing constructor failed!"); + obj->openglContext_ = openglContext; + return obj->Initialize(); +} + +VideoProcessing_ErrorCode OH_VideoProcessing::Destroy(OH_VideoProcessing* instance) +{ + CHECK_AND_RETURN_RET_LOG(instance != nullptr, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE, + "VPE video processing instance is null!"); + auto obj = instance->GetVideoProcessing(); + CHECK_AND_RETURN_RET_LOG(obj != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, + "VPE video processing instance is empty!"); + auto errorCode = obj->Deinitialize(); + delete instance; + instance = nullptr; + return errorCode; +} + +OH_VideoProcessing::OH_VideoProcessing(int type) +{ + videoProcessing_ = VideoProcessingFactory::CreateVideoProcessing(type, this); +} + +OH_VideoProcessing::~OH_VideoProcessing() +{ + videoProcessing_ = nullptr; +} + +std::shared_ptr OH_VideoProcessing::GetVideoProcessing() +{ + return videoProcessing_; +} diff --git a/framework/capi/video_processing/video_processing_native_base.cpp b/framework/capi/video_processing/video_processing_native_base.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b8e5342045d62754688ce9d1577497257a44e689 --- /dev/null +++ b/framework/capi/video_processing/video_processing_native_base.cpp @@ -0,0 +1,317 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "video_processing_native_base.h" + +#include "common/native_mfmagic.h" +#include "native_window.h" + +#include "video_environment_native.h" +#include "video_processing_callback_impl.h" +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +VideoProcessingNativeBase::VideoProcessingNativeBase(OH_VideoProcessing* context) : context_(context) +{ +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::Initialize() +{ + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(!isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already init!"); + auto result = InitializeInner(); + isInitialized_ = true; + return result; +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::Deinitialize() +{ + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + std::lock_guard lock(lock_); + CHECK_AND_RETURN_RET_LOG(isInitialized_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Already deinit!"); + isInitialized_ = false; + auto result = DeinitializeInner(); + return result; +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::RegisterCallback(const VideoProcessing_Callback* callback, + void* userData) +{ + CHECK_AND_RETURN_RET_LOG(callback != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "Callback is null!"); + CHECK_AND_RETURN_RET_LOG(callback->GetInnerCallback() != nullptr && callback->GetInnerCallback()->IsValid(), + VIDEO_PROCESSING_ERROR_INVALID_VALUE, "Callback is empty!"); + + return ExecuteWhenIdle([this, &callback, &userData]() { + std::lock_guard lock(callbackLock_); + VideoProcessing_ErrorCode errorCode = PrepareRegistrationLocked(); + if (errorCode != VIDEO_PROCESSING_SUCCESS) { + return errorCode; + } + auto [it, result] = callbacks_.insert({ callback->GetInnerCallback(), userData }); + CHECK_AND_RETURN_RET_LOG(result, VIDEO_PROCESSING_ERROR_PROCESS_FAILED, "Add callback failed!"); + hasCallback_ = true; + if (callback->GetInnerCallback()->HasOnNewOutputBuffer()) { + hasOnRenderOutputBuffer_ = true; + } + return VIDEO_PROCESSING_SUCCESS; + }, "Registration during running is not allowed!"); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::SetSurface(const OHNativeWindow* window) +{ + CHECK_AND_RETURN_RET_LOG(window != nullptr && window->surface != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, + "window is null or surface buffer is null!"); + + std::lock_guard lock(lock_); + auto result = SetSurface(window->surface, *window); + hasOutputSurface_ = (result == VIDEO_PROCESSING_SUCCESS); + return result; +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::GetSurface(OHNativeWindow** window) +{ + CHECK_AND_RETURN_RET_LOG(window != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "window is null!"); + return ExecuteWhenIdle([this, &window]() { + if (hasInputSurface_.load()) { + VPE_LOGE("Input surface is already created!"); + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + + sptr surface = GetSurface(); + CHECK_AND_RETURN_RET_LOG(surface != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, "get surface failed!"); + *window = CreateNativeWindowFromSurface(&surface); + CHECK_AND_RETURN_RET_LOG(*window != nullptr, VIDEO_PROCESSING_ERROR_CREATE_FAILED, "create window failed!"); + hasInputSurface_ = true; + return VIDEO_PROCESSING_SUCCESS; + }, "Surface getting during running is 
not allowed!"); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::SetParameter(const OH_AVFormat* parameter) +{ + CHECK_AND_RETURN_RET_LOG(parameter != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + return SetParameter(parameter->format_); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::GetParameter(OH_AVFormat* parameter) +{ + CHECK_AND_RETURN_RET_LOG(parameter != nullptr, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, "parameter is null!"); + return GetParameter(parameter->format_); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::Start() +{ + return ExecuteWhenIdle([this]() { + CHECK_AND_RETURN_RET_LOG(hasInputSurface_.load() && hasOutputSurface_.load() && hasCallback_.load(), + VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "Input/Output surface is not ready or the callback is not registered!"); + + TraverseCallbacksLocked([](std::shared_ptr& cb, void*) { cb->LockModifiers(); }); + isRunning_ = true; + return OnStart(); + }, "Already start!"); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::Stop() +{ + return ExecuteWhenRunning([this]() { + TraverseCallbacksLocked( + [](std::shared_ptr& cb, void*) { cb->UnlockModifiers(); }); + isRunning_ = false; + return OnStop(); + }, "Already stop!"); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::RenderOutputBuffer(uint32_t index) +{ + CHECK_AND_RETURN_RET_LOG(hasOnRenderOutputBuffer_.load(), VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "The operation is not permitted without binding OnRenderOutputBuffer!"); + if (isOnNewOutputBuffer_.load()) { + return OnRenderOutputBuffer(index); + } + return ExecuteWhenRunning([this, &index]() { + return OnRenderOutputBuffer(index); + }, "RenderOutputBuffer must be called during running!"); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::InitializeInner() +{ + return VideoEnvironmentNative::Get().InitializeByDefault(); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::DeinitializeInner() +{ + return VideoEnvironmentNative::Get().DeinitializeByDefault(); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::RegisterCallback() +{ + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::SetSurface(const sptr& surface, + [[maybe_unused]] const OHNativeWindow& window) +{ + return SetSurface(surface); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::SetSurface([[maybe_unused]] const sptr& surface) +{ + return VIDEO_PROCESSING_SUCCESS; +} + +sptr VideoProcessingNativeBase::GetSurface() +{ + return nullptr; +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::SetParameter([[maybe_unused]] const OHOS::Media::Format& parameter) +{ + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::GetParameter([[maybe_unused]] OHOS::Media::Format& parameter) +{ + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::OnStart() +{ + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::OnStop() +{ + return VIDEO_PROCESSING_SUCCESS; +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::OnRenderOutputBuffer([[maybe_unused]] uint32_t index) +{ + return VIDEO_PROCESSING_SUCCESS; +} + +void VideoProcessingNativeBase::OnError(VideoProcessing_ErrorCode errorCode) +{ + OnCallback([this, &errorCode](std::shared_ptr& callback, void* userData) { + callback->OnError(context_, errorCode, userData); + }, "OnError"); +} + +void 
VideoProcessingNativeBase::OnState(VideoProcessing_State state) +{ + OnCallback([this, &state](std::shared_ptr& callback, void* userData) { + callback->OnState(context_, state, userData); + }, "OnState"); +} + +void VideoProcessingNativeBase::OnNewOutputBuffer(uint32_t index) +{ + if (!isInitialized_.load()) { + VPE_LOGI("Skip index:%{public}u buffer because VPE has been deinitialized.", index); + return; + } + OnCallback([this, &index](std::shared_ptr& callback, void* userData) { + if (callback->HasOnNewOutputBuffer()) { + isOnNewOutputBuffer_ = true; + callback->OnNewOutputBuffer(context_, index, userData); + isOnNewOutputBuffer_ = false; + } else { + OnRenderOutputBuffer(index); + } + }, "OnNewOutputBuffer"); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::ExecuteWhenIdle( + std::function&& task, const std::string& errLog) +{ + if (isRunning_.load()) { + VPE_LOGW("%{public}s", errLog.c_str()); + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + + std::lock_guard lock(lock_); + if (isRunning_.load()) { + VPE_LOGW("%{public}s", errLog.c_str()); + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + return task(); +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::ExecuteWhenRunning( + std::function&& task, const std::string& errLog) +{ + if (!isRunning_.load()) { + VPE_LOGW("%{public}s", errLog.c_str()); + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + + std::lock_guard lock(lock_); + if (!isRunning_.load()) { + VPE_LOGW("%{public}s", errLog.c_str()); + return VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED; + } + return task(); +} + +void VideoProcessingNativeBase::OnCallback( + std::function&, void*)>&& task, + const std::string& name) +{ + std::lock_guard lock(callbackLock_); + TraverseCallbacksLocked(std::move(task)); +} + +void VideoProcessingNativeBase::TraverseCallbacksLocked( + std::function&, void*)>&& task) +{ + TraverseCallbacksExLocked([&task](std::shared_ptr& cb, void* userData) { + task(cb, userData); + return false; + }); +} + +void VideoProcessingNativeBase::TraverseCallbacksExLocked( + std::function&, void*)>&& task) +{ + for (auto& callback : callbacks_) { + std::shared_ptr cb = callback.callback; + if (cb == nullptr) [[unlikely]] { + VPE_LOGW("callback is null!"); + continue; + } + if (task(cb, callback.userData)) { + break; + } + } +} + +VideoProcessing_ErrorCode VideoProcessingNativeBase::PrepareRegistrationLocked() +{ + if (isInnerCallbackReady_) { + return VIDEO_PROCESSING_SUCCESS; + } + VideoProcessing_ErrorCode errorCode = RegisterCallback(); + if (errorCode == VIDEO_PROCESSING_SUCCESS) { + VPE_LOGD("Inner RegisterCallback success."); + isInnerCallbackReady_ = true; + } + return errorCode; +} diff --git a/framework/capi/video_processing/video_processing_utils.cpp b/framework/capi/video_processing/video_processing_utils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0e1ce5efcaa2ef6f22b5233b78545f856919ae34 --- /dev/null +++ b/framework/capi/video_processing/video_processing_utils.cpp @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "video_processing_utils.h" + +#include + +#include "algorithm_utils.h" +#include "vpe_log.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +namespace { +const std::unordered_map ERROR_MAP = { + { VPE_ALGO_ERR_OK, VIDEO_PROCESSING_SUCCESS }, + { VPE_ALGO_ERR_NO_MEMORY, VIDEO_PROCESSING_ERROR_NO_MEMORY }, + { VPE_ALGO_ERR_INVALID_OPERATION, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED }, + { VPE_ALGO_ERR_INVALID_VAL, VIDEO_PROCESSING_ERROR_INVALID_VALUE }, + { VPE_ALGO_ERR_UNKNOWN, VIDEO_PROCESSING_ERROR_UNKNOWN }, + { VPE_ALGO_ERR_INVALID_PARAM, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER }, + { VPE_ALGO_ERR_INIT_FAILED, VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED }, + { VPE_ALGO_ERR_EXTENSION_NOT_FOUND, VIDEO_PROCESSING_ERROR_UNSUPPORTED_PROCESSING }, + { VPE_ALGO_ERR_EXTENSION_INIT_FAILED, VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED }, + { VPE_ALGO_ERR_EXTENSION_PROCESS_FAILED, VIDEO_PROCESSING_ERROR_PROCESS_FAILED }, + { VPE_ALGO_ERR_NOT_IMPLEMENTED, VIDEO_PROCESSING_ERROR_UNSUPPORTED_PROCESSING }, + { VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED }, + { VPE_ALGO_ERR_INVALID_STATE, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED }, + { VPE_ALGO_ERR_EXTEND_START, VIDEO_PROCESSING_ERROR_UNKNOWN }, +}; +const std::unordered_map STATE_MAP = { + { VPEAlgoState::UNINITIALIZED, VIDEO_PROCESSING_STATE_STOPPED }, + { VPEAlgoState::INITIALIZED, VIDEO_PROCESSING_STATE_STOPPED }, + { VPEAlgoState::CONFIGURING, VIDEO_PROCESSING_STATE_STOPPED }, + { VPEAlgoState::CONFIGURED, VIDEO_PROCESSING_STATE_STOPPED }, + { VPEAlgoState::STOPPED, VIDEO_PROCESSING_STATE_STOPPED }, + { VPEAlgoState::RUNNING, VIDEO_PROCESSING_STATE_RUNNING }, + { VPEAlgoState::EOS, VIDEO_PROCESSING_STATE_RUNNING }, + { VPEAlgoState::ERROR, VIDEO_PROCESSING_STATE_STOPPED }, +}; +const std::unordered_map NDK_ERROR_STR_MAP = { + { VIDEO_PROCESSING_SUCCESS, VPE_TO_STR(VIDEO_PROCESSING_SUCCESS) }, + { VIDEO_PROCESSING_ERROR_INVALID_PARAMETER, VPE_TO_STR(VIDEO_PROCESSING_ERROR_INVALID_PARAMETER) }, + { VIDEO_PROCESSING_ERROR_UNKNOWN, VPE_TO_STR(VIDEO_PROCESSING_ERROR_UNKNOWN) }, + { VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, VPE_TO_STR(VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED) }, + { VIDEO_PROCESSING_ERROR_CREATE_FAILED, VPE_TO_STR(VIDEO_PROCESSING_ERROR_CREATE_FAILED) }, + { VIDEO_PROCESSING_ERROR_PROCESS_FAILED, VPE_TO_STR(VIDEO_PROCESSING_ERROR_PROCESS_FAILED) }, + { VIDEO_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, VPE_TO_STR(VIDEO_PROCESSING_ERROR_UNSUPPORTED_PROCESSING) }, + { VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, VPE_TO_STR(VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED) }, + { VIDEO_PROCESSING_ERROR_NO_MEMORY, VPE_TO_STR(VIDEO_PROCESSING_ERROR_NO_MEMORY) }, + { VIDEO_PROCESSING_ERROR_INVALID_INSTANCE, VPE_TO_STR(VIDEO_PROCESSING_ERROR_INVALID_INSTANCE) }, + { VIDEO_PROCESSING_ERROR_INVALID_VALUE, VPE_TO_STR(VIDEO_PROCESSING_ERROR_INVALID_VALUE) }, +}; +const std::unordered_map NDK_STATE_STR_MAP = { + { VIDEO_PROCESSING_STATE_RUNNING, VPE_TO_STR(VIDEO_PROCESSING_STATE_RUNNING) }, + { VIDEO_PROCESSING_STATE_STOPPED, 
VPE_TO_STR(VIDEO_PROCESSING_STATE_STOPPED) }, +}; +} + +VideoProcessing_ErrorCode VideoProcessingUtils::InnerErrorToNDK(VPEAlgoErrCode errorCode) +{ + auto it = ERROR_MAP.find(errorCode); + if (it == ERROR_MAP.end()) [[unlikely]] { + VPE_LOGE("Invalid error code:%{public}d", errorCode); + return VIDEO_PROCESSING_ERROR_UNKNOWN; + } + return it->second; +} + +VideoProcessing_State VideoProcessingUtils::InnerStateToNDK(VPEAlgoState state) +{ + auto it = STATE_MAP.find(state); + if (it == STATE_MAP.end()) [[unlikely]] { + VPE_LOGE("Invalid state:%{public}d", state); + return VIDEO_PROCESSING_STATE_STOPPED; + } + return it->second; +} + +std::string VideoProcessingUtils::ToString(VideoProcessing_ErrorCode errorCode) +{ + auto it = NDK_ERROR_STR_MAP.find(errorCode); + if (it == NDK_ERROR_STR_MAP.end()) [[unlikely]] { + VPE_LOGE("Invalid error code:%{public}d", errorCode); + return "Unsupported error:" + std::to_string(static_cast(errorCode)); + } + return it->second; +} + +std::string VideoProcessingUtils::ToString(VideoProcessing_State state) +{ + auto it = NDK_STATE_STR_MAP.find(state); + if (it == NDK_STATE_STR_MAP.end()) [[unlikely]] { + VPE_LOGE("Invalid state:%{public}d", state); + return "Unsupported state:" + std::to_string(static_cast(state)); + } + return it->second; +} diff --git a/framework/dfx/include/vpe_log.h b/framework/dfx/include/vpe_log.h new file mode 100644 index 0000000000000000000000000000000000000000..eb612cb85ef51b3ba4633d044a06ca1da03fa0b2 --- /dev/null +++ b/framework/dfx/include/vpe_log.h @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_DFX_LOG_H +#define FRAMEWORK_DFX_LOG_H + +#include +#include "hilog/log.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace VPELogFlag { +extern const bool VERBOSE_LOG; +} // VPELogFlag + +#undef LOG_DOMAIN +#define LOG_DOMAIN 0xD002B3F + +#undef LOG_TAG +#define LOG_TAG "VIDEOPROCESSENGINE" +#undef LOG_FMT +#define LOG_FMT "[%{public}s][%{public}s %{public}d] " + +#define VPE_LOGF(fmt, ...) HILOG_FATAL(LOG_CORE, LOG_FMT fmt, __FILE_NAME__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define VPE_LOGE(fmt, ...) HILOG_ERROR(LOG_CORE, LOG_FMT fmt, __FILE_NAME__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define VPE_LOGW(fmt, ...) HILOG_WARN(LOG_CORE, LOG_FMT fmt, __FILE_NAME__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define VPE_LOGI(fmt, ...) HILOG_INFO(LOG_CORE, LOG_FMT fmt, __FILE_NAME__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define VPE_LOGD(fmt, ...) HILOG_DEBUG(LOG_CORE, LOG_FMT fmt, __FILE_NAME__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define VPE_LOGD_LIMIT(frequency, fmt, ...) \ + do { \ + thread_local uint64_t currentTimes = 0; \ + if (currentTimes++ % ((uint64_t)(frequency)) == 0) { \ + VPE_LOGD(fmt, ##__VA_ARGS__); \ + } \ + } while (0) + +#define VPE_LOGV(fmt, ...) \ + if (VPELogFlag::VERBOSE_LOG) { \ + VPE_LOGD(fmt, ##__VA_ARGS__); \ + } + +#define CHECK_AND_RETURN_RET_LOG(cond, ret, fmt, ...) 
\ + do { \ + if (!(cond)) { \ + VPE_LOGE(fmt, ##__VA_ARGS__); \ + return ret; \ + } \ + } while (0) + +#define CHECK_AND_RETURN_LOG(cond, fmt, ...) \ + do { \ + if (!(cond)) { \ + VPE_LOGE(fmt, ##__VA_ARGS__); \ + return; \ + } \ + } while (0) + +#define CHECK_AND_BREAK_LOG(cond, fmt, ...) \ + if (1) { \ + if (!(cond)) { \ + VPE_LOGE(fmt, ##__VA_ARGS__); \ + break; \ + } \ + } else \ + void(0) + +#define CHECK_AND_CONTINUE_LOG(cond, fmt, ...) \ + if (1) { \ + if (!(cond)) { \ + VPE_LOGE(fmt, ##__VA_ARGS__); \ + continue; \ + } \ + } else \ + void(0) + +#define CHECK_AND_LOG(cond, fmt, ...) \ + do { \ + if (!(cond)) { \ + VPE_LOGE(fmt, ##__VA_ARGS__); \ + } \ + } while (0) + +#define VPE_LOG_PRINT_COLOR_SPACE_CAPBILITY(CSDesc, pixelFormat) \ + if (VPELogFlag::VERBOSE_LOG) { \ + VPE_LOGD("Cap: (pri=%{public}3d,trans=%{public}3d,mat=%{public}3d,range=%{public}3d,pixel=%{public}3d)", \ + (CSDesc).primaries, (CSDesc).transfunc, (CSDesc).matrix, (CSDesc).range, (pixelFormat)); \ + } + +#define VPE_LOG_PRINT_METADATA_GEN_CAPBILITY(CSDesc, pixelFormat, algoType) \ + if (VPELogFlag::VERBOSE_LOG) { \ + VPE_LOGI("Cap: (%{public}3d,%{public}3d,%{public}3d,%{public}3d,%{public}3d, %{public}3d)", \ + (CSDesc).primaries, (CSDesc).transfunc, (CSDesc).matrix, (CSDesc).range, (pixelFormat), algoType); \ + } + +#define POINTER_MASK 0x00FFFFFF +#define FAKE_POINTER(addr) (POINTER_MASK & reinterpret_cast(addr)) + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // FRAMEWORK_DFX_LOG_H diff --git a/framework/dfx/include/vpe_trace.h b/framework/dfx/include/vpe_trace.h new file mode 100644 index 0000000000000000000000000000000000000000..f1711f4c08ca09a08083752f77f8d2be3c6c1cb3 --- /dev/null +++ b/framework/dfx/include/vpe_trace.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAMEWORK_DFX_TRACE_H +#define FRAMEWORK_DFX_TRACE_H + +#include +#include "nocopyable.h" +#include "refbase.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +#define VPE_SYNC_TRACE VPETrace trace(std::string(__FUNCTION__)) + +class __attribute__((visibility("default"))) VPETrace : public NoCopyable { +public: + explicit VPETrace(const std::string& funcName); + static void TraceBegin(const std::string& funcName, int32_t taskId); + static void TraceEnd(const std::string& funcName, int32_t taskId); + static void CounterTrace(const std::string& varName, int32_t val); + ~VPETrace(); + +private: + bool isSync_ = false; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // FRAMEWORK_DFX_TRACE_H diff --git a/framework/dfx/vpe_log.cpp b/framework/dfx/vpe_log.cpp new file mode 100644 index 0000000000000000000000000000000000000000..08db058aca63027b95114ab812ad871b95773022 --- /dev/null +++ b/framework/dfx/vpe_log.cpp @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "vpe_log.h" +#include "syspara/parameters.h" + +namespace OHOS::Media::VideoProcessingEngine::VPELogFlag { + const bool VERBOSE_LOG = OHOS::system::GetBoolParameter("OHOS.Media.VideoProcessingEngine.VerboseLog", false); +} // OHOS::Media::VideoProcessingEngine::VPELogFlag \ No newline at end of file diff --git a/framework/dfx/vpe_trace.cpp b/framework/dfx/vpe_trace.cpp new file mode 100644 index 0000000000000000000000000000000000000000..9933cd2bd462507ddc0847452193ffe77a9c9676 --- /dev/null +++ b/framework/dfx/vpe_trace.cpp @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "vpe_trace.h" +#include +#include "hitrace_meter.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +VPETrace::VPETrace(const std::string& funcName) +{ + StartTrace(HITRACE_TAG_ZMEDIA, funcName); + isSync_ = true; +} + +void VPETrace::TraceBegin(const std::string& funcName, int32_t taskId) +{ + StartAsyncTrace(HITRACE_TAG_ZMEDIA, funcName, taskId); +} + +void VPETrace::TraceEnd(const std::string& funcName, int32_t taskId) +{ + FinishAsyncTrace(HITRACE_TAG_ZMEDIA, funcName, taskId); +} + +void VPETrace::CounterTrace(const std::string& varName, int32_t val) +{ + CountTrace(HITRACE_TAG_ZMEDIA, varName, val); +} + +VPETrace::~VPETrace() +{ + if (isSync_) { + FinishTrace(HITRACE_TAG_ZMEDIA); + } +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/interfaces/inner_api/aihdr_enhancer.h b/interfaces/inner_api/aihdr_enhancer.h new file mode 100644 index 0000000000000000000000000000000000000000..478c7b6b3c54dd4474dccfa44c15e2667ee90305 --- /dev/null +++ b/interfaces/inner_api/aihdr_enhancer.h @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INTERFACES_INNER_API_AIHDR_ENHANCER_H +#define INTERFACES_INNER_API_AIHDR_ENHANCER_H + +#include +#include +#include + +#include "external_window.h" + +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class __attribute__((visibility("default"))) AihdrEnhancer { +public: + /* * + * @brief Create a AihdrEnhancer object. + * @syscap + * @return pointer of the AihdrEnhancer object. + * @since 14 + */ + static std::shared_ptr Create(); + + /* * + * @brief 设置参数 + * @syscap + * @param parameter 输入参数 + * @return 返回错误码VPEAlgoErrCode + * @since 14 + */ + virtual VPEAlgoErrCode SetParameter(const int& parameter) = 0; + + /* * + * @brief 查询参数 + * @syscap + * @param parameter 输出参数 + * @return 返回错误码VPEAlgoErrCode + * @since 14 + */ + virtual VPEAlgoErrCode GetParameter(int& parameter) const = 0; + + /* * + * @brief 用于解码后sdr视频帧生成128位lut曲线。 + * @syscap + * @param input 输入图片,生成的lut曲线写入该image + * @return 返回错误码VPEAlgoErrCode + * @since 14 + */ + virtual VPEAlgoErrCode Process(const sptr& input) = 0; + +protected: + virtual ~AihdrEnhancer() = default; +}; + +extern "C" int32_t AihdrEnhancerCreate(int32_t* instance); + +extern "C" int32_t AihdrEnhancerProcessImage(int32_t instance, OHNativeWindowBuffer* inputImage); + +extern "C" int32_t AihdrEnhancerDestroy(int32_t* instance); + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_AIHDR_ENHANCER_H diff --git a/interfaces/inner_api/aihdr_enhancer_video.h b/interfaces/inner_api/aihdr_enhancer_video.h new file mode 100644 index 0000000000000000000000000000000000000000..57913ea830e816129c765c3c5e0499822272a44a --- /dev/null +++ b/interfaces/inner_api/aihdr_enhancer_video.h @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef AIHDR_ENHANCER_VIDEO_H +#define AIHDR_ENHANCER_VIDEO_H + +#include +#include + +#include "external_window.h" + +#include "aihdr_enhancer_video_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +// typedef struct Surface Surface; + +class __attribute__((visibility("default"))) AihdrEnhancerVideo { +public: + static std::shared_ptr Create(); + virtual ~AihdrEnhancerVideo() = default; + /* * + * @brief Registers a AihdrEnhancerVideo callback. + * + * This function must be called before {@link Prepare} + * + * @param callback Indicates the callback to register. For details, see {@link AihdrEnhancerVideoCallback}. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t SetCallback(const std::shared_ptr &callback) = 0; + + /* * + * @brief Sets the window on which to render the output of this AihdrEnhancerVideo. + * + * This function must be called before {@link Prepare} + * + * @param window The output window. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. 
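+ *
+ * Illustrative call order (a sketch; `callback` and `outputWindow` are assumed to be a valid
+ * std::shared_ptr<AihdrEnhancerVideoCallback> and OHNativeWindow* prepared by the caller):
+ * @code
+ * auto enhancer = AihdrEnhancerVideo::Create();
+ * enhancer->SetCallback(callback);
+ * enhancer->SetSurface(outputWindow); // must be called before Prepare()
+ * enhancer->Configure();
+ * enhancer->Prepare();
+ * enhancer->Start();
+ * @endcode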
+ * @since 5.0 + */ + virtual int32_t SetSurface(const OHNativeWindow* window) = 0; + + /* * + * @brief Obtains the surface from AihdrEnhancerVideo. + * + * This function can only be called before {@link Prepare} + * + * @return Returns the pointer to the surface. + * @since 5.0 + */ + virtual int32_t GetSurface(OHNativeWindow** window) = 0; + + /* * + * @brief Configure the AihdrEnhancerVideo. + * + * @param window The input window. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Configure() = 0; + + /* * + * @brief Prepare for AihdrEnhancerVideo. + * + * This function must be called before {@link Start} + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Prepare() = 0; + + /* * + * @brief Start AihdrEnhancerVideo. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Start() = 0; + + /* * + * @brief Stop AihdrEnhancerVideo. + * + * This function must be called during running + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Stop() = 0; + + /* * + * @brief Restores the AihdrEnhancerVideo to the initial state. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Reset() = 0; + + /* * + * @brief Releases AihdrEnhancerVideo resources. All methods are unavailable after calling this. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Release() = 0; + + /* * + * @brief Notify eos of the AihdrEnhancerVideo. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t NotifyEos() = 0; + + /* * + * @brief Returns the output buffer to the AihdrEnhancerVideo. + * + * This function must be called during running + * + * @param index The index of the output buffer. + * @param render Whether to render the buffer. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t ReleaseOutputBuffer(uint32_t index, bool render) = 0; + + virtual int32_t Flush() = 0; +}; +using ArgumentType = void; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // AIHDR_ENHANCER_VIDEO_H \ No newline at end of file diff --git a/interfaces/inner_api/aihdr_enhancer_video_common.h b/interfaces/inner_api/aihdr_enhancer_video_common.h new file mode 100644 index 0000000000000000000000000000000000000000..44361c0f44b714da33f76a0948ca732ce62c67c6 --- /dev/null +++ b/interfaces/inner_api/aihdr_enhancer_video_common.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef AIHDR_ENHANCER_VIDEO_COMMON_H +#define AIHDR_ENHANCER_VIDEO_COMMON_H +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum AihdrEnhancerBufferFlag : uint32_t { + AIHDR_ENHANCER_BUFFER_FLAG_NONE = 0, + /* This signals the end of stream */ + AIHDR_ENHANCER_BUFFER_FLAG_EOS = 1 << 0, +}; + +class __attribute__((visibility("default"))) AihdrEnhancerVideoCallback { +public: + virtual ~AihdrEnhancerVideoCallback() = default; + /* * + * Called when an error occurred. + * + * @param errorCode Error code. + * @since 5.0 + */ + virtual void OnError(int32_t errorCode) = 0; + + /* * + * Called when an state changed. + * + * @param state current state. + * @since 5.0 + */ + virtual void OnState(int32_t state) = 0; + + /* * + * Called when an output buffer becomes available. + * + * @param index The index of the available output buffer. + * @since 5.0 + */ + virtual void OnOutputBufferAvailable(uint32_t index, AihdrEnhancerBufferFlag flag) = 0; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // AIHDR_ENHANCER_VIDEO_COMMON_H diff --git a/interfaces/inner_api/algorithm_common.h b/interfaces/inner_api/algorithm_common.h new file mode 100644 index 0000000000000000000000000000000000000000..4fb22970fabdec544584febd2f07247ea686d390 --- /dev/null +++ b/interfaces/inner_api/algorithm_common.h @@ -0,0 +1,156 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INTERFACES_INNER_API_ALGORITHM_ALGORITHM_COMMON_H +#define INTERFACES_INNER_API_ALGORITHM_ALGORITHM_COMMON_H + +#include +#include +#include "refbase.h" +#include "surface_buffer.h" +#include "v1_0/buffer_handle_meta_key_type.h" +#include "v1_0/cm_color_space.h" +#include "v1_0/hdr_static_metadata.h" +#include "algorithm_errors.h" + +typedef struct OpenGLContext OpenGLContext; +typedef struct ClContext ClContext; +extern "C" int SetupOpencl(void **pHandle, const char *vendorName, char *deviceName); + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +constexpr uint8_t COLORPRIMARIES_OFFSET = 0; +constexpr uint8_t TRANSFUNC_OFFSET = 8; +constexpr uint8_t MATRIX_OFFSET = 16; +constexpr uint8_t RANGE_OFFSET = 21; +constexpr uint32_t COLORPRIMARIES_MASK = 0x000000FF; +constexpr uint32_t TRANSFUNC_MASK = 0x0000FF00; +constexpr uint32_t MATRIX_MASK = 0x001F0000; +constexpr uint32_t RANGE_MASK = 0xFFE00000; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +typedef struct { + unsigned int systemStartCode; // 表示系统版本号 + // minimum_maxrgb_pq,表示显示内容的最小亮度,pq域,范围从0~4095 + unsigned int minimumMaxRgbPq; + unsigned int averageMaxRgbPq; // average_maxrgb_pq,表示显示内容的平均亮度,pq域,范围从0.0~4095 + // variance_maxrgb_pq,表示显示内容的变化范围,pq域,范围从0.0~4095 + unsigned int varianceMaxRgbPq; + unsigned int maximumMaxRgbPq; // maximum_maxrgb_pq,表示显示内容的最大亮度,pq域,范围从0.0~4095 + // tone_mapping_enable_mode_flag,取值为0或1,若为0,则不传曲线参数,否则传输参数 + unsigned int toneMappingMode; + // tone_mapping_para_enable_num,表示当前色调映射参数组的数目减1, + // 为0时色调映射参数组的数目为1,为1时色调映射参数组的数目为2 + unsigned int toneMappingParamNum; + // targeted_system_display_maximum_luminancePq,参考目标显示器最高亮度,范围从0.0~4095, + // 数组长度为2说明有2组参数,tone_mapping_param_num + unsigned int targetedSystemDisplayMaximumLuminance[2]; + // base_enable_flag,基础曲线标识,取值为0或1,为0则不传输基础曲线参数,为1则传输参数 + unsigned int baseFlag[4]; + unsigned int baseParamMp[2]; // 范围0~16383 + unsigned int baseParamMm[2]; // 范围0~63 + unsigned int baseParamMa[2]; // 范围0~1023 + unsigned int baseParamMb[2]; // 范围0~1023 + unsigned int baseParamMn[2]; // 范围0~63 + unsigned int baseParamK1[2]; // 分小于等于1 和 大于1两种情况 + unsigned int baseParamK2[2]; // 分小于等于1 和 大于1两种情况 + unsigned int baseParamK3[2]; // 分小于等于1 和 1~2 和 大于2两种情况 + // base_param_delta_enable_mode,标识当前基础曲线映射参数的调整系数模式 + unsigned int baseParamDeltaMode[2]; + // base_param_enable_delta,标识当前基础曲线映射参数的调整系数值,范围0~127 + unsigned int baseParamDelta[2]; + // 3Spline_enable_flag,二值变量,为1时标识传输三次样条参数,为0时不传 + unsigned int threeSplineFlag[2]; + unsigned int threeSplineNum[2]; // 3Spline_enable_num,标识三次样条区间数量,取值为0和1 + // 3Spline_TH_enable_mode,标识色调映射的三次样条模式,范围为0~3, + // 这里数组长度4是由于P3Spline_num + unsigned int threeSplineThMode[2][4]; + // 3Spline_TH_enable_MB,指示色调映射的三次样条区间参数的斜率和暗区偏移量 + unsigned int threeSplineThMb[2][4]; + // 3Spline_TH_enable, 指示色调映射的三次样条区间参数,范围0~4095,第三维度表示三次样 + // 条区间参数(0~4095)、三次样条区间1偏移量(0~1023)和三次样条区间2偏移量(0~1023) + unsigned int threeSplineTh[2][4][3]; + // 3Spline_enable_Strength,指示色调映射的三次样条区间的修正幅度参数,范围0~255 + unsigned int threeSplineStrength[2][4]; + unsigned int colorSaturationMappingFlag; // 对应标准中color_saturation_mapping_enable_flag + unsigned int colorSaturationNum; // 对应标准中color_saturation_enable_num + unsigned int colorSaturationGain[16]; // 对应标准中color_saturation_enable_gain +} HdrVividMetadataV1; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +constexpr int MAX_IMAGE_SIZE = 8880; +using namespace HDI::Display::Graphic::Common::V1_0; + +struct ColorSpaceDescription { + CM_ColorSpaceInfo 
colorSpaceInfo;
+ CM_HDR_Metadata_Type metadataType;
+
+ static VPEAlgoErrCode Create(const sptr<SurfaceBuffer> &buffer, ColorSpaceDescription &desc);
+ bool operator < (const ColorSpaceDescription &desc) const;
+};
+
+enum class RenderIntent {
+ RENDER_INTENT_PERCEPTUAL, // perceptual intent
+ RENDER_INTENT_RELATIVE_COLORIMETRIC, // relative colorimetric intent
+ RENDER_INTENT_ABSOLUTE_COLORIMETRIC, // absolute colorimetric rendering intent
+ RENDER_INTENT_SATURATION // saturation intent
+};
+
+enum class MetadataGeneratorAlgoType {
+ META_GEN_ALGO_TYPE_IMAGE, // image
+ META_GEN_ALGO_TYPE_VIDEO // video
+};
+
+struct ColorSpaceConverterParameter {
+ RenderIntent renderIntent; // render intent
+ std::optional<float> sdrUIBrightnessRatio {
+ std::nullopt
+ }; // Must be configured when the input is SDR UI: SDR brightness adjustment (dimming) coefficient. Range [1, 6].
+ bool isVideo = false; // Set to true for frames extracted from a video so that no metadata is generated; metadata is always generated for images by default.
+};
+
+struct ColorSpaceConverterDisplayParameter {
+ ColorSpaceDescription inputColorSpace; // color space information
+ ColorSpaceDescription outputColorSpace; // color space information
+ std::vector staticMetadata; // static metadata
+ std::vector dynamicMetadata; // dynamic metadata
+ std::vector layerLinearMatrix; // linear-domain conversion matrix, applied on the source color gamut
+ float tmoNits; // TMO target brightness
+ float currentDisplayNits; // current display brightness; divided with tmoNits to obtain the SDR brightness dimming ratio
+ float sdrNits; // SDR brightness
+ int32_t width; // width
+ int32_t height; // height
+ bool disableHdrFloatHeadRoom; // do not use the HDR FP16 scheme for extra brightening
+ std::vector adaptiveFOVMetadata; // metadata for scaling re-adaptation
+};
+
+struct MetadataGeneratorParameter {
+ MetadataGeneratorAlgoType algoType = MetadataGeneratorAlgoType::META_GEN_ALGO_TYPE_IMAGE;
+ bool isOldHdrVivid = false;
+ float avgGainmapGray = 0.0;
+};
+
+uint32_t GetColorSpaceType(const CM_ColorSpaceInfo &colorSpaceInfo);
+CM_ColorSpaceInfo GetColorSpaceInfo(const uint32_t colorSpaceType);
+int SetupOpengl(std::shared_ptr<OpenGLContext> &openglHandle);
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
+#endif // INTERFACES_INNER_API_ALGORITHM_ALGORITHM_COMMON_H
diff --git a/interfaces/inner_api/algorithm_errors.h b/interfaces/inner_api/algorithm_errors.h
new file mode 100644
index 0000000000000000000000000000000000000000..9664dd0f9d348affd15efe14569c6cbf149d838a
--- /dev/null
+++ b/interfaces/inner_api/algorithm_errors.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef INTERFACES_INNER_API_ALGORITHM_ALGORITHM_ERRORS_H
+#define INTERFACES_INNER_API_ALGORITHM_ALGORITHM_ERRORS_H
+
+#include
+#include
+#include "errors.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+// bit 28~21 is subsys, bit 20~16 is Module. bit 15~0 is code
+constexpr ErrCode VPE_ALGO_MODULE = 11;
+constexpr ErrCode VPE_ALGO_ERR_OFFSET = ErrCodeOffset(SUBSYS_MULTIMEDIA, VPE_ALGO_MODULE);
+typedef enum VPEAlgoErrCode : ErrCode {
+ VPE_ALGO_ERR_OK = ERR_OK,
+ VPE_ALGO_ERR_NO_MEMORY = VPE_ALGO_ERR_OFFSET + ENOMEM, // no memory
+ VPE_ALGO_ERR_INVALID_OPERATION = VPE_ALGO_ERR_OFFSET + ENOSYS, // operation not permitted
+ VPE_ALGO_ERR_INVALID_VAL = VPE_ALGO_ERR_OFFSET + EINVAL, // invalid argument
+ VPE_ALGO_ERR_UNKNOWN = VPE_ALGO_ERR_OFFSET + 0x200, // unknown error
+ VPE_ALGO_ERR_INIT_FAILED, // video processing engine init failed
+ VPE_ALGO_ERR_EXTENSION_NOT_FOUND, // extension not found
+ VPE_ALGO_ERR_EXTENSION_INIT_FAILED, // extension init failed
+ VPE_ALGO_ERR_EXTENSION_PROCESS_FAILED, // extension process failed
+ VPE_ALGO_ERR_NOT_IMPLEMENTED, // extension is not implemented
+ VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, // operation not supported
+ VPE_ALGO_ERR_INVALID_STATE, // this operation is not supported in the current state
+ VPE_ALGO_ERR_INVALID_PARAM, // invalid parameter
+
+ VPE_ALGO_ERR_EXTEND_START = VPE_ALGO_ERR_OFFSET + 0xF000, // start of extended error codes
+} VPEAlgoErrCode;
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
+#endif // INTERFACES_INNER_API_ALGORITHM_ALGORITHM_ERRORS_H
diff --git a/interfaces/inner_api/algorithm_video.h b/interfaces/inner_api/algorithm_video.h
new file mode 100644
index 0000000000000000000000000000000000000000..6c680855e06eeff6061cf30c4ef0c879fbfd3b49
--- /dev/null
+++ b/interfaces/inner_api/algorithm_video.h
@@ -0,0 +1,186 @@
+/*
+ * Copyright (c) 2025 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef INTERFACES_INNER_API_ALGORITHM_VIDEO_H
+#define INTERFACES_INNER_API_ALGORITHM_VIDEO_H
+
+#include
+#include
+
+#include "meta/format.h"
+#include "refbase.h"
+#include "surface.h"
+
+#include "algorithm_errors.h"
+#include "algorithm_video_common.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+class __attribute__((visibility("default"))) VpeVideo {
+public:
+ /**
+ * @brief Create a VpeVideo object.
+ * @param type Use VIDEO_TYPE_XXX to specify the processing type. For details, see {@link VpeVideoType}.
+ * @return If successful, returns a pointer to the VpeVideo object. On error, returns a null pointer.
+ * @since 5.1
+ * @version 5.1
+ */
+ static std::shared_ptr<VpeVideo> Create(uint32_t type);
+
+ /**
+ * @brief Register callback object.
+ * @param callback Callback object to be registered. For details, see {@link VpeVideoCallback}.
+ * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, returns an error code.
+ * @since 5.1
+ * @version 5.1
+ */
+ virtual VPEAlgoErrCode RegisterCallback(const std::shared_ptr<VpeVideoCallback>& callback);
+
+ /**
+ * @brief Set the output surface for video processing.
+ * @param surface The output surface object.
+ * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, returns an error code.
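+ *
+ * Illustrative setup sketch (`cb` and `outputSurface` are assumed to be a valid
+ * std::shared_ptr<VpeVideoCallback> and a consumer-side sptr<Surface> owned by the caller):
+ * @code
+ * auto video = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER);
+ * video->RegisterCallback(cb);
+ * video->SetOutputSurface(outputSurface);
+ * sptr<Surface> producer = video->GetInputSurface(); // feed input frames through this surface
+ * video->Start();
+ * @endcode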
+ * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode SetOutputSurface(const sptr& surface); + + /** + * @brief Create an input surface. + * @return If successful, returns a pointer to the input surface object. On error, returns a null pointer. + * @since 5.1 + * @version 5.1 + */ + virtual sptr GetInputSurface(); + + /** + * @brief Set parameter for video processing. Add parameter identified by the specified parameter key. + * @param parameter The parameter for video processing. + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. + * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode SetParameter(const Format& parameter); + + /** + * @brief Get parameter of video processing. Get parameter identified by the specified parameter key. + * @param parameter The parameter of video processing. + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. + * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode GetParameter(Format& parameter); + + /** + * @brief Start video processing. + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. + * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode Start(); + + /** + * @brief Stop video processing. + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. + * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode Stop(); + + /** + * @brief Flush both input and output buffers for video processing. + * + * This method must be called during running. + * + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. + * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode Flush(); + + /** + * @brief Enable video processing effect. It only can be called after {@link Disable}. + * + * This method must be called during running. + * + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. + * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode Enable(); + + /** + * @brief Disable video processing effect. If video processing is disabled, then the processor + * sends the surfacebuffer of the input surface to the output surface directly. + * + * This method must be called during running. + * + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. + * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode Disable(); + + /** + * @brief Notify the end of stream. + * + * This method must be called during running. + * + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. + * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode NotifyEos(); + + /** + * @brief Returns the output buffer to video processing. + * + * This method must be called during running. + * + * @param index The index of the output buffer. + * @param render Whether to render the buffer. + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. + * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode ReleaseOutputBuffer(uint32_t index, bool render); + + /** + * @brief Send the output buffer out. + * + * This method must be called during running. + * + * @param index The index of the output buffer. + * @param renderTimestamp The timestamp is associated with the output buffer when it is sent to the surface. + * @return If successful, returns {@link VPE_ALGO_ERR_OK}. On error, retuns an error code. 
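+ *
+ * Typically called from {@link VpeVideoCallback::OnOutputBufferAvailable}; a sketch, assuming
+ * `video` holds the VpeVideo instance that produced the buffer:
+ * @code
+ * void OnOutputBufferAvailable(uint32_t index, const VpeBufferInfo& info) override
+ * {
+ * // Render at the buffer's own presentation timestamp; pass a different value to retime it.
+ * video->RenderOutputBufferAtTime(index, info.presentationTimestamp);
+ * }
+ * @endcode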
+ * @since 5.1 + * @version 5.1 + */ + virtual VPEAlgoErrCode RenderOutputBufferAtTime(uint32_t index, int64_t renderTimestamp); + +protected: + VpeVideo() = default; + virtual ~VpeVideo() = default; + VpeVideo(const VpeVideo&) = delete; + VpeVideo& operator=(const VpeVideo&) = delete; + VpeVideo(VpeVideo&&) = delete; + VpeVideo& operator=(VpeVideo&&) = delete; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_ALGORITHM_VIDEO_H diff --git a/interfaces/inner_api/algorithm_video_common.h b/interfaces/inner_api/algorithm_video_common.h new file mode 100644 index 0000000000000000000000000000000000000000..7ce1e9472308b87365afe83e307c6c092ac3c178 --- /dev/null +++ b/interfaces/inner_api/algorithm_video_common.h @@ -0,0 +1,254 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_INNER_API_ALGORITHM_ALGORITHM_VIDEO_COMMON_H +#define INTERFACES_INNER_API_ALGORITHM_ALGORITHM_VIDEO_COMMON_H + +#include +#include +#include + +#include "meta/format.h" + +#include "algorithm_errors.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum class VPEAlgoState : int32_t { + UNINITIALIZED, + INITIALIZED, + CONFIGURING, + CONFIGURED, + STOPPED, + RUNNING, + EOS, + FLUSHED, + ERROR, +}; + +/** + * @brief Feature type of video processing. + * + * @since 5.1 + * @version 5.1 + */ +enum VpeVideoType : uint32_t { + /** + * @brief Used to create an video processing object of detail enhancement. + * + * Scale or resize video with the specified quality or just enhance details for rendering without changing its + * resolution. + * + * @since 5.1 + * @version 5.1 + */ + VIDEO_TYPE_DETAIL_ENHANCER = 0x4, +}; + +/** + * @brief Flag of video processing buffer. + * + * @since 5.1 + * @version 5.1 + */ +enum VpeBufferFlag : uint32_t { + VPE_BUFFER_FLAG_NONE = 0, + /** This signals the end of stream */ + VPE_BUFFER_FLAG_EOS = 1 << 0, +}; + +/** + * @brief Information of video processing buffer. + * + * @since 5.1 + * @version 5.1 + */ +struct VpeBufferInfo { + /** The flag of the available output buffer. For details, see {@link VpeBufferFlag}. */ + VpeBufferFlag flag{VPE_BUFFER_FLAG_NONE}; + /** presentationTimestamp The presentation timestamp for the buffer. */ + int64_t presentationTimestamp{-1}; +}; + +/** + * @brief Video processing callback base class, you can inherited it and only override partial methods as needed. + * + * @since 5.1 + * @version 5.1 + */ +class __attribute__((visibility("default"))) VpeVideoCallback { +public: + /** + * Called when an error occurred. + * + * @param errorCode Error code. For details, see {@link VPEAlgoErrCode}. + * @since 5.1 + * @version 5.1 + */ + virtual void OnError(VPEAlgoErrCode errorCode); + + /** + * Called when switch new state. + * + * @param state Current state. For details, see {@link VPEAlgoState}. 
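+ *
+ * A minimal override sketch:
+ * @code
+ * void OnState(VPEAlgoState state) override
+ * {
+ * if (state == VPEAlgoState::ERROR) {
+ * // Stop feeding input and release the processing object.
+ * }
+ * }
+ * @endcode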
+ * @since 5.1 + * @version 5.1 + */ + virtual void OnState(VPEAlgoState state); + + /** + * Called when one of the features enable or disable effect. + * + * @param type Current enable processing type. For details, see {@link VpeVideoType}. + * If type is 0, no effect is enabled now. + * @since 5.1 + * @version 5.1 + */ + virtual void OnEffectChange(uint32_t type); + + /** + * Called when an output format changed. + * + * @param format Output surfacebuffer format. + * @since 5.1 + * @version 5.1 + */ + virtual void OnOutputFormatChanged(const Format& format); + + /** + * Called when an output buffer becomes available. + * + * @param index The index of the available output buffer. + * @param flag The flag of the available output buffer. For details, see {@link VpeBufferFlag}. + * @since 5.1 + * @version 5.1 + */ + virtual void OnOutputBufferAvailable(uint32_t index, VpeBufferFlag flag); + + /** + * Called when an output buffer becomes available. + * + * @param index The index of the available output buffer. + * @param info The information of the available output buffer. For details, see {@link VpeBufferInfo}. + * @since 5.1 + * @version 5.1 + */ + virtual void OnOutputBufferAvailable(uint32_t index, const VpeBufferInfo& info); + +protected: + VpeVideoCallback() = default; + virtual ~VpeVideoCallback() = default; + VpeVideoCallback(const VpeVideoCallback&) = delete; + VpeVideoCallback& operator=(const VpeVideoCallback&) = delete; + VpeVideoCallback(VpeVideoCallback&&) = delete; + VpeVideoCallback& operator=(VpeVideoCallback&&) = delete; +}; + +/** + * @brief Width and height of the image buffer. + * + * It is the value of the key parameter {@link ParameterKey::DETAIL_ENHANCER_TARGET_SIZE}. + * + * @see VpeVideo::SetParameter + * @see VpeVideo::GetParameter + * @since 5.1 + * @version 5.1 + */ +struct VpeBufferSize { + int width{}; + int height{}; +}; + +/** + * @brief The quality level is used for detail enhancement. + * + * It is the value of the key parameter {@link ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL}. + * + * @see VpeVideo::SetParameter + * @see VpeVideo::GetParameter + * @since 5.1 + * @version 5.1 + */ +enum DetailEnhancerQualityLevel { + /** No detail enhancement */ + DETAIL_ENHANCER_LEVEL_NONE = 0, + /** A low level of detail enhancement quality but with a fast speed. It's the default level */ + DETAIL_ENHANCER_LEVEL_LOW, + /** A medium level of detail enhancement quality. Its speed is between the low setting and high setting */ + DETAIL_ENHANCER_LEVEL_MEDIUM, + /** A high level of detail enhancement quality but with a relatively slow speed */ + DETAIL_ENHANCER_LEVEL_HIGH, +}; + +/** + * @brief Contains the key corresponding to each paramter value. + * + * @see VpeVideo::SetParameter + * @see VpeVideo::GetParameter + * @since 5.1 + * @version 5.1 + */ +class ParameterKey { +public: + /** + * @brief The key is used to specify the quality level for video detail enhancement. + * + * See {@link DetailEnhancerQualityLevel} for its values. + * Use {@link VpeVideo::SetParameter} and {@link Format::SetIntValue} to set the quality level. + * Use {@link VpeVideo::GetParameter} and {@link Format::GetIntValue} to get the current quality level. + * + * @since 5.1 + * @version 5.1 + */ + static constexpr std::string_view DETAIL_ENHANCER_QUALITY_LEVEL{"QualityLevel"}; + + /** + * @brief The key is used to specify width and height of the target image. + * + * See {@link VpeBufferSize} for its values. 
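+ *
+ * Illustrative sketch (assuming Format::SetBuffer takes a key, a byte pointer and a size, and
+ * that `video` is a VpeVideo instance):
+ * @code
+ * VpeBufferSize size = {1920, 1080};
+ * Format parameter;
+ * parameter.SetBuffer(ParameterKey::DETAIL_ENHANCER_TARGET_SIZE,
+ * reinterpret_cast<uint8_t*>(&size), sizeof(size));
+ * video->SetParameter(parameter);
+ * @endcode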
+ * Use {@link VpeVideo::SetParameter} and {@link Format::SetBuffer} to set the size of the target image. + * Use {@link VpeVideo::GetParameter} and {@link Format::GetBuffer} to get the current size of the target image. + * + * @since 5.1 + * @version 5.1 + */ + static constexpr std::string_view DETAIL_ENHANCER_TARGET_SIZE{"TargetSize"}; + + /** + * @brief The key is used to specify whether automatically downshift the quality level for detail + * enhancement or not. Default value is true. + * + * Use {@link VpeVideo::SetParameter} and {@link Format::SetIntValue} to set whether automatic downshift or not. + * Use {@link VpeVideo::GetParameter} and {@link Format::GetIntValue} to get whether automatic downshift or not. + * + * @since 5.1 + * @version 5.1 + */ + static constexpr std::string_view DETAIL_ENHANCER_AUTO_DOWNSHIFT{"AutoDownshift"}; + +private: + ParameterKey() = delete; + ~ParameterKey() = delete; + ParameterKey(const ParameterKey&) = delete; + ParameterKey& operator=(const ParameterKey&) = delete; + ParameterKey(ParameterKey&&) = delete; + ParameterKey& operator=(ParameterKey&&) = delete; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_ALGORITHM_ALGORITHM_VIDEO_COMMON_H diff --git a/interfaces/inner_api/colorspace_converter.h b/interfaces/inner_api/colorspace_converter.h new file mode 100644 index 0000000000000000000000000000000000000000..53612556da32ea4d5d91e0b24232c9c2921d2ea2 --- /dev/null +++ b/interfaces/inner_api/colorspace_converter.h @@ -0,0 +1,131 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_INNER_API_ALGORITHM_COLORSPACE_CONVERTER_H +#define INTERFACES_INNER_API_ALGORITHM_COLORSPACE_CONVERTER_H + +#include +#include +#include +#include "external_window.h" +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Process + * 解码后的视频帧转换 + * 单层hdr图片 -> sdr图片 + * sdr图片 -> 单层hdr图片 + * sdr图片 -> sdr图片 + * ComposeImage + * 双层hdr图片 -> 单层hdr图片 + * DecomposeImage + * 单层hdr图片 -> 双层hdr图片 + * sdr图片 -> 双层hdr图片 + */ +class __attribute__((visibility("default"))) ColorSpaceConverter { +public: + /* * + * @brief Create a ColorspaceConverter object. + * @syscap + * @return pointer of the ColorspaceConverter object. 
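+ *
+ * Illustrative single-image sketch (`input` and `output` are assumed to be valid
+ * sptr<SurfaceBuffer> objects allocated by the caller):
+ * @code
+ * auto converter = ColorSpaceConverter::Create();
+ * ColorSpaceConverterParameter param;
+ * param.renderIntent = RenderIntent::RENDER_INTENT_PERCEPTUAL;
+ * converter->SetParameter(param);
+ * converter->Process(input, output);
+ * @endcode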
+ * @since 11 + */ + static std::shared_ptr Create(); + static std::shared_ptr Create(std::shared_ptr openglContext, + ClContext *opengclContext = nullptr); + + /* * + * @brief 设置色彩空间转换参数。可被多次调用,但只有最接近Process的一次调用设置的参数 + * 会在Process时生效。 + * @syscap + * @param parameter 转换参数 + * @return 返回错误码VPEAlgoErrCode + * @since 11 + */ + virtual VPEAlgoErrCode SetParameter(const ColorSpaceConverterParameter ¶meter) = 0; + + /* * + * @brief 查询参数 + * @syscap + * @param parameter 输出参数 + * @return 返回错误码VPEAlgoErrCode + * @since 11 + */ + virtual VPEAlgoErrCode GetParameter(ColorSpaceConverterParameter ¶meter) const = 0; + + /* * + * @brief 用于解码后的视频帧,以及单层图片的转换, + * 解码视频帧转换 + * 单层hdr图片 -> sdr图片 + * sdr图片 -> 单层hdr图片 + * sdr图片 -> sdr图片 + * @syscap + * @param input 输入的源视频帧、sdr图片或单层hdr图片 + * @param output 输出的视频帧、sdr图片或单层hdr图片 + * @return 返回错误码VPEAlgoErrCode + * @since 11 + */ + virtual VPEAlgoErrCode Process(const sptr &input, const sptr &output) = 0; + + /* * + * @brief 用于双层hdr图片转单层hdr图片。 + * @syscap + * @param inputSdrImage 输入的双层hdr图片的sdr图片部分 + * @param inputGainmap 输入的双层hdr图片的gainmap部分 + * @param outputHdrImage 输出的单层hdr图片 + * @param legacy 如果输入的双层hdr图片是老双层的格式,则应当设置为true;否则设置为false + * @return 返回错误码VPEAlgoErrCode + * @since 11 + */ + virtual VPEAlgoErrCode ComposeImage(const sptr &inputSdrImage, + const sptr &inputGainmap, const sptr &outputHdrImage, bool legacy) = 0; + + /* * + * @brief 用于sdr图片或单层hdr图片转双层hdr图片。输出为新双层hdr图片格式。 + * @syscap + * @param inputImage 输入的sdr图片或单层hdr图片 + * @param outputSdrImage 输出的双层hdr图片的sdr图片部分 + * @param outputGainmap 输出的双层hdr图片的gainmap部分 + * @return 返回错误码VPEAlgoErrCode + * @since 11 + */ + virtual VPEAlgoErrCode DecomposeImage(const sptr &inputImage, + const sptr &outputSdrImage, const sptr &outputGainmap) = 0; + +protected: + virtual ~ColorSpaceConverter() = default; +}; + +extern "C" int32_t ColorSpaceConverterCreate(int32_t* instance); + +extern "C" int32_t ColorSpaceConverterProcessImage(int32_t instance, OHNativeWindowBuffer* inputImage, + OHNativeWindowBuffer* outputImage); + +extern "C" int32_t ColorSpaceConverterComposeImage(int32_t instance, OHNativeWindowBuffer* inputSdrImage, + OHNativeWindowBuffer* inputGainmap, OHNativeWindowBuffer* outputHdrImage, bool legacy); + +extern "C" int32_t ColorSpaceConverterDecomposeImage(int32_t instance, OHNativeWindowBuffer* inputImage, + OHNativeWindowBuffer* outputSdrImage, OHNativeWindowBuffer* outputGainmap); + +extern "C" int32_t ColorSpaceConverterDestroy(int32_t* instance); + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_ALGORITHM_COLORSPACE_CONVERTER_H diff --git a/interfaces/inner_api/colorspace_converter_display.h b/interfaces/inner_api/colorspace_converter_display.h new file mode 100644 index 0000000000000000000000000000000000000000..a87e9a57038a0e7ded0b0f2d1d480d4a69075fb1 --- /dev/null +++ b/interfaces/inner_api/colorspace_converter_display.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_INNER_API_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_H +#define INTERFACES_INNER_API_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_H + +#include +#include +#include +#include +#include "effect/shader_effect.h" +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class __attribute__((visibility("default"))) ColorSpaceConverterDisplay { +public: + /* * + * @brief Create a ColorspaceConverterDisplay object. + * @syscap + * @param + * @return pointer of the ColorspaceConverterDisplay object. + * @since 4.1 + */ + static std::shared_ptr Create(); + + /* * + * @brief 构造显示场景中用于色彩空间转换的shader。 + * @syscap + * @param input 当前已经存在的shader + * @param output 在inputShader基础上添加了色彩空间转换的shader的新shader + * @param Parameter 构建色彩空间转换shader需要的参数 + * @return 返回错误码VPEAlgoErrCode + * @since 4.1 + */ + virtual VPEAlgoErrCode Process(const std::shared_ptr& input, + std::shared_ptr& output, + const ColorSpaceConverterDisplayParameter& parameter) = 0; +protected: + virtual ~ColorSpaceConverterDisplay() = default; +}; + +#ifdef __cplusplus +extern "C" { +#endif + +using ColorSpaceConvertDisplayHandle = void; +using VPEShaderEffectHandle = void; + +ColorSpaceConvertDisplayHandle *ColorSpaceConvertDisplayCreate(); +VPEAlgoErrCode ColorSpaceConvertDisplayProcess(ColorSpaceConvertDisplayHandle *handle, VPEShaderEffectHandle *input, + VPEShaderEffectHandle *output, const ColorSpaceConverterDisplayParameter ¶meter); +void ColorSpaceConvertDisplayDestroy(ColorSpaceConvertDisplayHandle *handle); + +#ifdef __cplusplus +} +#endif + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_ALGORITHM_COLORSPACE_CONVERTER_DISPLAY_H diff --git a/interfaces/inner_api/colorspace_converter_video.h b/interfaces/inner_api/colorspace_converter_video.h new file mode 100644 index 0000000000000000000000000000000000000000..8a22c77ae4e3b391a7898b658ca32f13057ada12 --- /dev/null +++ b/interfaces/inner_api/colorspace_converter_video.h @@ -0,0 +1,205 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef COLORSPACE_CONVERTER_VIDEO_H +#define COLORSPACE_CONVERTER_VIDEO_H +#include +#include "surface.h" +#include "meta/format.h" +#include "colorspace_converter_video_common.h" +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class __attribute__((visibility("default"))) ColorSpaceConverterVideo { +public: + static std::shared_ptr Create(); + static std::shared_ptr Create(std::shared_ptr openglContext); + virtual ~ColorSpaceConverterVideo() = default; + /* * + * @brief Registers a ColorSpaceConverterVideo callback. + * + * This function must be called before {@link Prepare} + * + * @param callback Indicates the callback to register. For details, see {@link ColorSpaceConverterVideoCallback}. 
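+ *
+ * Illustrative call order (a sketch; `cb` implements ColorSpaceConverterVideoCallback,
+ * `outputSurface` is a consumer-side sptr<Surface>, and `format` is a Format describing the
+ * input and desired output, see {@link CscVDescriptionKey}):
+ * @code
+ * auto cscv = ColorSpaceConverterVideo::Create();
+ * cscv->SetCallback(cb);
+ * cscv->SetOutputSurface(outputSurface);
+ * sptr<Surface> producer = cscv->CreateInputSurface();
+ * cscv->Configure(format);
+ * cscv->Prepare();
+ * cscv->Start();
+ * @endcode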
+ * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t SetCallback(const std::shared_ptr &callback) = 0; + + /* * + * @brief Sets the surface on which to render the output of this ColorSpaceConverterVideo. + * + * This function must be called before {@link Prepare} + * + * @param surface The output surface. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t SetOutputSurface(sptr surface) = 0; + + /* * + * @brief Obtains the surface from ColorSpaceConverterVideo. + * + * This function can only be called before {@link Prepare} + * + * @return Returns the pointer to the surface. + * @since 4.1 + */ + virtual sptr CreateInputSurface() = 0; + + /* * + * @brief Sets the parameters to the ColorSpaceConverterVideo. + * + * This function must be called after {@link Prepare} + * + * @param parameter + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t SetParameter(const Format ¶meter) = 0; + + /* * + * @brief Gets the parameters of the ColorSpaceConverterVideo. + * + * This function must be called after {@link Prepare} + * + * @param parameter + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t GetParameter(Format ¶meter) = 0; + + /* * + * @brief Configure the ColorSpaceConverterVideo. + * + * @param format The format of the input data and the desired format of the output data. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t Configure(const Format &format) = 0; + + /* * + * @brief Prepare for ColorSpaceConverterVideo. + * + * This function must be called before {@link Start} + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t Prepare() = 0; + + /* * + * @brief Start ColorSpaceConverterVideo. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t Start() = 0; + + /* * + * @brief Stop ColorSpaceConverterVideo. + * + * This function must be called during running + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t Stop() = 0; + + /* * + * @brief Restores the ColorSpaceConverterVideo to the initial state. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t Reset() = 0; + + /* * + * @brief Releases ColorSpaceConverterVideo resources. All methods are unavailable after calling this. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t Release() = 0; + + /* * + * @brief Notify eos of the ColorSpaceConverterVideo. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 4.1 + */ + virtual int32_t NotifyEos() = 0; + + /* * + * @brief Returns the output buffer to the ColorSpaceConverterVideo. + * + * This function must be called during running + * + * @param index The index of the output buffer. + * @param render Whether to render the buffer. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. 
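+ *
+ * Typically called from {@link ColorSpaceConverterVideoCallback::OnOutputBufferAvailable};
+ * a sketch, assuming `cscv` holds the converter instance:
+ * @code
+ * void OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) override
+ * {
+ * cscv->ReleaseOutputBuffer(index, true); // true: render the converted frame to the output surface
+ * }
+ * @endcode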
+ * @since 4.1 + */ + virtual int32_t ReleaseOutputBuffer(uint32_t index, bool render) = 0; + + virtual int32_t Flush() = 0; + /* * + * @brief Gets the format of the output data. + * + * @param format Obtain the required output data format. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t GetOutputFormat(Format &format) = 0; +}; + +#ifdef __cplusplus +extern "C" { +#endif + +using ColorSpaceConvertVideoHandle = void; +using ArgumentType = void; + +int32_t ColorSpaceConvertVideoIsColorSpaceConversionSupported(const ArgumentType* input, const ArgumentType* output); +ColorSpaceConvertVideoHandle* ColorSpaceConvertVideoCreate(); +void ColorSpaceConvertVideoDestroy(ColorSpaceConvertVideoHandle* handle); +int32_t ColorSpaceConvertVideoSetCallback(ColorSpaceConvertVideoHandle* handle, ArgumentType* callback, + ArgumentType* userData); +int32_t ColorSpaceConvertVideoSetOutputSurface(ColorSpaceConvertVideoHandle* handle, ArgumentType* surface); +int32_t ColorSpaceConvertVideoCreateInputSurface(ColorSpaceConvertVideoHandle* handle, ArgumentType* surface); +int32_t ColorSpaceConvertVideoSetParameter(ColorSpaceConvertVideoHandle* handle, ArgumentType* parameter); +int32_t ColorSpaceConvertVideoGetParameter(ColorSpaceConvertVideoHandle* handle, ArgumentType* parameter); +int32_t ColorSpaceConvertVideoConfigure(ColorSpaceConvertVideoHandle* handle, ArgumentType* configuration); +int32_t ColorSpaceConvertVideoPrepare(ColorSpaceConvertVideoHandle* handle); +int32_t ColorSpaceConvertVideoStart(ColorSpaceConvertVideoHandle* handle); +int32_t ColorSpaceConvertVideoStop(ColorSpaceConvertVideoHandle* handle); +int32_t ColorSpaceConvertVideoFlush(ColorSpaceConvertVideoHandle* handle); +int32_t ColorSpaceConvertVideoReset(ColorSpaceConvertVideoHandle* handle); +int32_t ColorSpaceConvertVideoRelease(ColorSpaceConvertVideoHandle* handle); +int32_t ColorSpaceConvertVideoNotifyEos(ColorSpaceConvertVideoHandle* handle); +int32_t ColorSpaceConvertVideoReleaseOutputBuffer(ColorSpaceConvertVideoHandle* handle, uint32_t index, bool render); +int32_t ColorSpaceConvertVideoGetOutputFormat(ColorSpaceConvertVideoHandle* handle, ArgumentType* format); +int32_t ColorSpaceConvertVideoOnProducerBufferReleased(ColorSpaceConvertVideoHandle *handle); + +#ifdef __cplusplus +} +#endif + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // COLORSPACE_CONVERTER_VIDEO_H \ No newline at end of file diff --git a/interfaces/inner_api/colorspace_converter_video_common.h b/interfaces/inner_api/colorspace_converter_video_common.h new file mode 100644 index 0000000000000000000000000000000000000000..2a7cf882e12444294d2ca0c5326c2b4917c3af5b --- /dev/null +++ b/interfaces/inner_api/colorspace_converter_video_common.h @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef CSCV_COMMON_H +#define CSCV_COMMON_H +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum CscvBufferFlag : uint32_t { + CSCV_BUFFER_FLAG_NONE = 0, + /* This signals the end of stream */ + CSCV_BUFFER_FLAG_EOS = 1 << 0, +}; + +class __attribute__((visibility("default"))) ColorSpaceConverterVideoCallback { +public: + virtual ~ColorSpaceConverterVideoCallback() = default; + /* * + * Called when an error occurred. + * + * @param errorCode Error code. + * @since 4.1 + */ + virtual void OnError(int32_t errorCode) = 0; + + /* * + * Called when an state changed. + * + * @param state current state. + * @since 5.0 + */ + virtual void OnState(int32_t state) = 0; + + /* * + * Called when an output buffer becomes available. + * + * @param index The index of the available output buffer. + * @since 4.1 + */ + virtual void OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) = 0; + + /* * + * Called when an output format changed. + * + * @param format output surfacebuffer format. + * @since 5.0 + */ + virtual void OnOutputFormatChanged(const Format& format) = 0; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // CSCV_COMMON_H diff --git a/interfaces/inner_api/colorspace_converter_video_description.h b/interfaces/inner_api/colorspace_converter_video_description.h new file mode 100644 index 0000000000000000000000000000000000000000..ba0178e958f4ff38ff3d3ad79867fc2da066cda6 --- /dev/null +++ b/interfaces/inner_api/colorspace_converter_video_description.h @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef CSCV_DESCRIPTION_H +#define CSCV_DESCRIPTION_H + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class __attribute__((visibility("default"))) CscVDescriptionKey { +public: + /* + * Key for colorspace type, value type is int32_t, see {link @CM_ColorPrimaries} + */ + static constexpr std::string_view CSCV_KEY_COLORSPACE_PRIMARIES = "colorspace_primaries"; + + /* + * Key for colorspace type, value type is int32_t, see {link @CM_TransFunc} + */ + static constexpr std::string_view CSCV_KEY_COLORSPACE_TRANS_FUNC = "colorspace_trans_func"; + + /* + * Key for colorspace type, value type is int32_t, see {link @CM_Matrix} + */ + static constexpr std::string_view CSCV_KEY_COLORSPACE_MATRIX = "colorspace_matrix"; + + /* + * Key for colorspace type, value type is int32_t, see {link @CM_Range} + */ + static constexpr std::string_view CSCV_KEY_COLORSPACE_RANGE = "colorspace_range"; + + /* + * Key for hdr metedata type, value type is int32_t, see {link @CM_HDRMetaDataType} + */ + static constexpr std::string_view CSCV_KEY_HDR_METADATA_TYPE = "hdr_metadata_type"; + + /* + * Key for render intent, value type is int32_t, see {link @RenderIntent} + */ + static constexpr std::string_view CSCV_KEY_RENDER_INTENT = "render_intent"; + + /* + * Key for brightness ratio for sdrui, value type is float + */ + static constexpr std::string_view CSCV_KEY_SDRUI_BRIGHTNESS_RATIO = "sdrui_brightness_ratio"; + + /* + * Key for pixel format, value type is int, see {link @GraphicPixelFormat} + */ + static constexpr std::string_view CSCV_KEY_PIXEL_FORMAT = "pixel_format"; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // CSCV_DESCRIPTION_H diff --git a/interfaces/inner_api/contrast_enhancer_common.h b/interfaces/inner_api/contrast_enhancer_common.h new file mode 100644 index 0000000000000000000000000000000000000000..d050138121b243eaef1edc33bf1d787b7cbb1fe4 --- /dev/null +++ b/interfaces/inner_api/contrast_enhancer_common.h @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef CONTRAST_ENHANCER_COMMON_H +#define CONTRAST_ENHANCER_COMMON_H + +#include + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum ContrastEnhancerLevel { + CONTRAST_ENHANCER = 0, +}; + +struct ContrastEnhancerParameters { + std::string uri{}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // CONTRAST_ENHANCER_COMMON_H + diff --git a/interfaces/inner_api/contrast_enhancer_image.h b/interfaces/inner_api/contrast_enhancer_image.h new file mode 100644 index 0000000000000000000000000000000000000000..3ad37a69ce90749d2cf860a7765dbd231c8af181 --- /dev/null +++ b/interfaces/inner_api/contrast_enhancer_image.h @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_INNER_API_VPE_CONTRAST_ENHANCER_IMAGE_H +#define INTERFACES_INNER_API_VPE_CONTRAST_ENHANCER_IMAGE_H + +#include + +#include "algorithm_errors.h" +#include "contrast_enhancer_common.h" +#include "external_window.h" +#include "refbase.h" +#include "surface_buffer.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Process + * 执行计算画质算法对图像质量进行增强,如:超分、AIHDR等: + * 1. AISR超分: + * 原始图片 -> 缩放后的目标尺寸图片 + * 2. AIHDR: + * 原始图片 -> 处理后的单层HDR图片 + */ +class __attribute__((visibility("default"))) ContrastEnhancerImage { +public: + /** + * @brief Create a ContrastEnhancerImage object. + * @syscap + * @param type 超分的类型为图片还是视频 + * @return pointer of the ContrastEnhancerImage object. + * @since 16 + */ + static std::shared_ptr Create(); + + /** + * @brief 设置计算画质参数。可被多次调用,但只有最接近Process的一次调用设置的参数 + * 会在Process时生效。 + * @syscap + * @param parameter 转换参数 + * @return 返回错误码VPEAlgoErrCode + * @since 16 + */ + virtual VPEAlgoErrCode SetParameter(const ContrastEnhancerParameters& parameter) = 0; + + /** + * @brief 查询参数 + * @syscap + * @param parameter 输出参数 + * @return 返回错误码VPEAlgoErrCode + * @since 12 + */ + virtual VPEAlgoErrCode GetParameter(ContrastEnhancerParameters& parameter) const = 0; + + /** + * @brief 初始化区域直方图 + * @syscap + * @param input 输入的sdr图片或单层hdr图片 + * @return 返回错误码VPEAlgoErrCode + * @since 16 + */ + virtual VPEAlgoErrCode GetRegionHist(const sptr& input) = 0; + + /** + * @brief 基于像素值更新元数据 + * @syscap + * @param displayArea 送显区域 + * @param curPixelmapArea 当前输入pixelmap在完整原图中的区域 + * @param completePixelmapArea 完整原图的分辨率信息 + * @param surfaceBuffer 当前输入pixelmap的surfacebuffer + * @param fullRatio 缩放比例 + * @return 返回错误码VPEAlgoErrCode + * @since 16 + */ + virtual VPEAlgoErrCode UpdateMetadataBasedOnDetail(OHOS::Rect displayArea, OHOS::Rect curPixelmapArea, + OHOS::Rect completePixelmapArea, sptr surfaceBuffer, float fullRatio) = 0; + + /** + * @brief 基于LCD图更新元数据 + * @syscap + * @param displayArea 送显区域 + * @param lcdWidth lcd图的宽度 + * @param lcdHeight lcd图的高度 + * @param surfaceBuffer 当前输入pixelmap的surfacebuffer + * @return 返回错误码VPEAlgoErrCode + * @since 16 + */ + virtual VPEAlgoErrCode UpdateMetadataBasedOnLcd(OHOS::Rect displayArea, int lcdWidth, int lcdHeight, + sptr surfaceBuffer) = 0; + +protected: + ContrastEnhancerImage() = default; + virtual ~ContrastEnhancerImage() = default; + ContrastEnhancerImage(const ContrastEnhancerImage&) = delete; + ContrastEnhancerImage& operator=(const ContrastEnhancerImage&) = delete; + ContrastEnhancerImage(ContrastEnhancerImage&&) = delete; + ContrastEnhancerImage& operator=(ContrastEnhancerImage&&) = delete; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_VPE_CONTRAST_ENHANCER_IMAGE_H diff --git a/interfaces/inner_api/detail_enhancer_common.h b/interfaces/inner_api/detail_enhancer_common.h new file mode 100644 index 0000000000000000000000000000000000000000..3d619cd2dfd7f58aaefe75d456a42bad98898209 --- /dev/null +++ b/interfaces/inner_api/detail_enhancer_common.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DETAIL_ENHANCER_COMMON_H +#define DETAIL_ENHANCER_COMMON_H + +#include + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum DetailEnhancerLevel { + DETAIL_ENH_LEVEL_NONE = 0, + DETAIL_ENH_LEVEL_LOW, + DETAIL_ENH_LEVEL_MEDIUM, + // High gear is the highest gear. If you need to add a higher gear, you should + // pay attention to the level configuration of the high gear in impl. + DETAIL_ENH_LEVEL_HIGH, +}; + +enum SourceType { + IMAGE = 0, + VIDEO, +}; + +struct DetailEnhancerParameters { + std::string uri{}; + DetailEnhancerLevel level{DETAIL_ENH_LEVEL_LOW}; + int forceEve{}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // DETAIL_ENHANCER_COMMON_H + diff --git a/interfaces/inner_api/detail_enhancer_image.h b/interfaces/inner_api/detail_enhancer_image.h new file mode 100644 index 0000000000000000000000000000000000000000..490b0617ea6d4b47aaa2f834a9041c23d8cb0c99 --- /dev/null +++ b/interfaces/inner_api/detail_enhancer_image.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_INNER_API_VPE_DETAIL_ENHANCER_IMAGE_H +#define INTERFACES_INNER_API_VPE_DETAIL_ENHANCER_IMAGE_H + +#include + +#include "algorithm_errors.h" +#include "detail_enhancer_common.h" +#include "external_window.h" +#include "refbase.h" +#include "surface_buffer.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +/** + * Process + * 执行计算画质算法对图像质量进行增强,如:超分、AIHDR等: + * 1. AISR超分: + * 原始图片 -> 缩放后的目标尺寸图片 + * 2. AIHDR: + * 原始图片 -> 处理后的单层HDR图片 + */ +class __attribute__((visibility("default"))) DetailEnhancerImage { +public: + /** + * @brief Create a DetailEnhancerImage object. + * @syscap + * @param type 超分的类型为图片还是视频 + * @return pointer of the DetailEnhancerImage object. 
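+ *
+ * Illustrative sketch (`input` and `output` are assumed to be valid sptr<SurfaceBuffer> objects,
+ * with `output` allocated at the desired target size):
+ * @code
+ * auto enhancer = DetailEnhancerImage::Create(IMAGE);
+ * DetailEnhancerParameters param;
+ * param.level = DETAIL_ENH_LEVEL_HIGH;
+ * enhancer->SetParameter(param);
+ * enhancer->Process(input, output);
+ * @endcode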
+ * @since 12 + */ + static std::shared_ptr Create(int type = IMAGE); + + /** + * @brief 设置计算画质参数。可被多次调用,但只有最接近Process的一次调用设置的参数 + * 会在Process时生效。 + * @syscap + * @param parameter 转换参数 + * @return 返回错误码VPEAlgoErrCode + * @since 12 + */ + virtual VPEAlgoErrCode SetParameter(const DetailEnhancerParameters& parameter) = 0; + + /** + * @brief 查询参数 + * @syscap + * @param parameter 输出参数 + * @return 返回错误码VPEAlgoErrCode + * @since 12 + */ + virtual VPEAlgoErrCode GetParameter(DetailEnhancerParameters& parameter) const = 0; + + /** + * @brief 用于解码后的单层图片画质增强处理,如:超分、AIHDR等 + * @syscap + * @param input 输入的sdr图片或单层hdr图片 + * @param output 输出画质增强后的sdr图片或单层hdr图片 + * @return 返回错误码VPEAlgoErrCode + * @since 12 + */ + virtual VPEAlgoErrCode Process(const sptr& input, const sptr& output, + bool flag = false) = 0; + +protected: + DetailEnhancerImage() = default; + virtual ~DetailEnhancerImage() = default; + DetailEnhancerImage(const DetailEnhancerImage&) = delete; + DetailEnhancerImage& operator=(const DetailEnhancerImage&) = delete; + DetailEnhancerImage(DetailEnhancerImage&&) = delete; + DetailEnhancerImage& operator=(DetailEnhancerImage&&) = delete; +}; + +extern "C" __attribute__((visibility("default"))) int32_t DetailEnhancerCreate(int32_t* instance); +extern "C" __attribute__((visibility("default"))) int32_t DetailEnhancerProcessImage(int32_t instance, + OHNativeWindowBuffer* inputImage, OHNativeWindowBuffer* outputImage, int32_t level); +extern "C" __attribute__((visibility("default"))) int32_t DetailEnhancerDestroy(int32_t* instance); +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_VPE_DETAIL_ENHANCER_IMAGE_H diff --git a/interfaces/inner_api/detail_enhancer_video.h b/interfaces/inner_api/detail_enhancer_video.h new file mode 100644 index 0000000000000000000000000000000000000000..40d8206df1dbb8ccf2d934c20573df85b80303f0 --- /dev/null +++ b/interfaces/inner_api/detail_enhancer_video.h @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_INNER_API_VPE_DETAIL_ENHANCER_VIDEO_H +#define INTERFACES_INNER_API_VPE_DETAIL_ENHANCER_VIDEO_H + +#include + +#include "algorithm_errors.h" +#include "algorithm_video.h" +#include "detail_enhancer_common.h" +#include "detail_enhancer_video_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class __attribute__((visibility("default"))) DetailEnhancerVideo : public VpeVideo { +public: + using VpeVideo::SetParameter; + + /** + * @brief Create a DetailEnhancerVideo object. + * @syscap + * @return pointer of the DetailEnhancerVideo object. 
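A hedged usage sketch for the DetailEnhancerImage interface above; it assumes Create() returns std::shared_ptr<DetailEnhancerImage>, that Process() takes sptr<SurfaceBuffer> buffers, and that the output buffer's dimensions define the scaling target.

#include "detail_enhancer_image.h"

using namespace OHOS;
using namespace OHOS::Media::VideoProcessingEngine;

bool UpscaleImage(const sptr<SurfaceBuffer>& input, const sptr<SurfaceBuffer>& output)
{
    auto enhancer = DetailEnhancerImage::Create(IMAGE);   // IMAGE / VIDEO from SourceType
    if (enhancer == nullptr) {
        return false;
    }
    DetailEnhancerParameters param{};
    param.level = DETAIL_ENH_LEVEL_MEDIUM;
    if (enhancer->SetParameter(param) != VPE_ALGO_ERR_OK) {
        return false;
    }
    return enhancer->Process(input, output) == VPE_ALGO_ERR_OK;
}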
+ * @since 12 + */ + static std::shared_ptr Create(); + + /** + * @brief 设置计算画质参数。可被多次调用,但只有最接近Process的一次调用设置的参数 + * 会在Process时生效。 + * @syscap + * @param parameter 转换参数 + * @return 返回错误码VPEAlgoErrCode + * @since 12 + */ + virtual VPEAlgoErrCode SetParameter(const DetailEnhancerParameters& parameter, SourceType type) = 0; + + /** + * @brief 渲染输出buffer. + * @syscap + * @param input 索引 + * @return 返回错误码VPEAlgoErrCode + * @since 12 + */ + virtual VPEAlgoErrCode RenderOutputBuffer(uint32_t index) = 0; + +protected: + DetailEnhancerVideo() = default; + virtual ~DetailEnhancerVideo() = default; + DetailEnhancerVideo(const DetailEnhancerVideo&) = delete; + DetailEnhancerVideo& operator=(const DetailEnhancerVideo&) = delete; + DetailEnhancerVideo(DetailEnhancerVideo&&) = delete; + DetailEnhancerVideo& operator=(DetailEnhancerVideo&&) = delete; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_VPE_DETAIL_ENHANCER_VIDEO_H diff --git a/interfaces/inner_api/detail_enhancer_video_common.h b/interfaces/inner_api/detail_enhancer_video_common.h new file mode 100644 index 0000000000000000000000000000000000000000..09701ec46e4df940d34d360b892466fb7428d9d5 --- /dev/null +++ b/interfaces/inner_api/detail_enhancer_video_common.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_INNER_API_VPE_DETAIL_ENHANCER_VIDEO_COMMON_H +#define INTERFACES_INNER_API_VPE_DETAIL_ENHANCER_VIDEO_COMMON_H + +#include "algorithm_video_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +using DetailEnhBufferFlag = VpeBufferFlag; +#define DETAIL_ENH_BUFFER_FLAG_NONE VPE_BUFFER_FLAG_NONE +#define DETAIL_ENH_BUFFER_FLAG_EOS VPE_BUFFER_FLAG_EOS + +class __attribute__((visibility("default"))) DetailEnhancerVideoCallback : public VpeVideoCallback { +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_VPE_DETAIL_ENHANCER_VIDEO_COMMON_H diff --git a/interfaces/inner_api/metadata_generator.h b/interfaces/inner_api/metadata_generator.h new file mode 100644 index 0000000000000000000000000000000000000000..1bca172b302bbd15d391eabf024e263d49d27c95 --- /dev/null +++ b/interfaces/inner_api/metadata_generator.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
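A sketch of configuring DetailEnhancerVideo as declared above. The actual frame flow goes through the VpeVideo base class in algorithm_video.h, which is not part of this excerpt, so only the parameter setup and the render call are shown.

#include "detail_enhancer_video.h"

using namespace OHOS::Media::VideoProcessingEngine;

bool SetUpVideoEnhancer(std::shared_ptr<DetailEnhancerVideo>& video)
{
    video = DetailEnhancerVideo::Create();
    if (video == nullptr) {
        return false;
    }
    DetailEnhancerParameters param{};
    param.level = DETAIL_ENH_LEVEL_LOW;
    // SourceType VIDEO selects the video path of the detail enhancer.
    return video->SetParameter(param, VIDEO) == VPE_ALGO_ERR_OK;
}

// Later, once the VpeVideo callback reports a processed frame with index `index`:
//     video->RenderOutputBuffer(index);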
+ */ + +#ifndef INTERFACES_INNER_API_ALGORITHM_METADATA_GENERATOR_H +#define INTERFACES_INNER_API_ALGORITHM_METADATA_GENERATOR_H + +#include +#include +#include +#include "external_window.h" +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class __attribute__((visibility("default"))) MetadataGenerator { +public: + /* * + * @brief Create a MetadataGenerator object. + * @syscap + * @return pointer of the MetadataGenerator object. + * @since 11 + */ + static std::shared_ptr Create(); + static std::shared_ptr Create(std::shared_ptr openglContext); + + /* * + * @brief 设置参数 + * @syscap + * @param parameter 输入参数 + * @return 返回错误码VPEAlgoErrCode + * @since 11 + */ + virtual VPEAlgoErrCode SetParameter(const MetadataGeneratorParameter ¶meter) = 0; + + /* * + * @brief 查询参数 + * @syscap + * @param parameter 输出参数 + * @return 返回错误码VPEAlgoErrCode + * @since 11 + */ + virtual VPEAlgoErrCode GetParameter(MetadataGeneratorParameter ¶meter) const = 0; + + /* * + * @brief 用于解码后视频帧、sdr和单层hdr图片元数据生成。 + * @syscap + * @param input 输入图片,生成的元数据写入该image + * @return 返回错误码VPEAlgoErrCode + * @since 11 + */ + virtual VPEAlgoErrCode Process(const sptr &input) = 0; + +protected: + virtual ~MetadataGenerator() = default; +}; + +extern "C" int32_t MetadataGeneratorCreate(int32_t* instance); + +extern "C" int32_t MetadataGeneratorProcessImage(int32_t instance, OHNativeWindowBuffer* inputImage); + +extern "C" int32_t MetadataGeneratorDestroy(int32_t* instance); + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_ALGORITHM_METADATA_GENERATOR_H diff --git a/interfaces/inner_api/metadata_generator_video.h b/interfaces/inner_api/metadata_generator_video.h new file mode 100644 index 0000000000000000000000000000000000000000..7c7eb8e0f39af17cfdacd0242f38376cd5b041e7 --- /dev/null +++ b/interfaces/inner_api/metadata_generator_video.h @@ -0,0 +1,141 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef METADATA_GENERATOR_VIDEO_H +#define METADATA_GENERATOR_VIDEO_H +#include +#include "surface.h" +#include "metadata_generator_video_common.h" +#include "algorithm_common.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class __attribute__((visibility("default"))) MetadataGeneratorVideo { +public: + static std::shared_ptr Create(); + static std::shared_ptr Create(std::shared_ptr openglContext); + virtual ~MetadataGeneratorVideo() = default; + /* * + * @brief Registers a MetadataGeneratorVideo callback. + * + * This function must be called before {@link Prepare} + * + * @param callback Indicates the callback to register. For details, see {@link MetadataGeneratorVideoCallback}. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. 
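For orientation, a minimal sketch of the single-image MetadataGenerator above; it assumes Create() returns std::shared_ptr<MetadataGenerator> and that MetadataGeneratorParameter (from algorithm_common.h, not shown) can be value-initialized.

#include "metadata_generator.h"

using namespace OHOS;
using namespace OHOS::Media::VideoProcessingEngine;

bool GenerateImageMetadata(const sptr<SurfaceBuffer>& image)
{
    auto generator = MetadataGenerator::Create();
    if (generator == nullptr) {
        return false;
    }
    MetadataGeneratorParameter param{};   // fields defined in algorithm_common.h
    if (generator->SetParameter(param) != VPE_ALGO_ERR_OK) {
        return false;
    }
    // The generated metadata is written back into the input buffer itself.
    return generator->Process(image) == VPE_ALGO_ERR_OK;
}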
+ * @since 5.0 + */ + virtual int32_t SetCallback(const std::shared_ptr &callback) = 0; + + /* * + * @brief Sets the surface on which to render the output of this MetadataGeneratorVideo. + * + * This function must be called before {@link Prepare} + * + * @param surface The output surface. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t SetOutputSurface(sptr surface) = 0; + + /* * + * @brief Obtains the surface from MetadataGeneratorVideo. + * + * This function can only be called before {@link Prepare} + * + * @return Returns the pointer to the surface. + * @since 5.0 + */ + virtual sptr CreateInputSurface() = 0; + + /* * + * @brief Configure the MetadataGeneratorVideo. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Configure() = 0; + + /* * + * @brief Prepare for MetadataGeneratorVideo. + * + * This function must be called before {@link Start} + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Prepare() = 0; + + /* * + * @brief Start MetadataGeneratorVideo. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Start() = 0; + + /* * + * @brief Stop MetadataGeneratorVideo. + * + * This function must be called during running + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Stop() = 0; + + /* * + * @brief Restores the MetadataGeneratorVideo to the initial state. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Reset() = 0; + + /* * + * @brief Releases MetadataGeneratorVideo resources. All methods are unavailable after calling this. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t Release() = 0; + + /* * + * @brief Notify eos of the MetadataGeneratorVideo. + * + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t NotifyEos() = 0; + + /* * + * @brief Returns the output buffer to the MetadataGeneratorVideo. + * + * This function must be called during running + * + * @param index The index of the output buffer. + * @param render Whether to render the buffer. + * @return Returns {@link VPE_ALGO_ERR_OK} if success; returns an error code otherwise. + * @since 5.0 + */ + virtual int32_t ReleaseOutputBuffer(uint32_t index, bool render) = 0; + + virtual int32_t Flush() = 0; +}; +using ArgumentType = void; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // METADATA_GENERATOR_VIDEO_H \ No newline at end of file diff --git a/interfaces/inner_api/metadata_generator_video_common.h b/interfaces/inner_api/metadata_generator_video_common.h new file mode 100644 index 0000000000000000000000000000000000000000..8549df3ad3c29741335a351c51c4812cb0f98883 --- /dev/null +++ b/interfaces/inner_api/metadata_generator_video_common.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
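The ordering constraints in the doc comments above can be summarized in a hypothetical lifecycle sketch. The callback and output surface are assumed to come from the caller, SetCallback() is assumed to take a std::shared_ptr<MetadataGeneratorVideoCallback>, and CreateInputSurface() is assumed to return sptr<Surface>.

#include "metadata_generator_video.h"

using namespace OHOS;
using namespace OHOS::Media::VideoProcessingEngine;

bool RunMetadataGeneratorVideo(const std::shared_ptr<MetadataGeneratorVideoCallback>& callback,
                               const sptr<Surface>& outputSurface)
{
    auto mg = MetadataGeneratorVideo::Create();
    if (mg == nullptr) {
        return false;
    }
    mg->SetCallback(callback);                              // must precede Prepare()
    mg->SetOutputSurface(outputSurface);                    // must precede Prepare()
    sptr<Surface> inputSurface = mg->CreateInputSurface();  // only valid before Prepare()
    mg->Configure();
    mg->Prepare();
    mg->Start();
    // ...the producer queues frames into inputSurface; OnOutputBufferAvailable() fires per frame...
    mg->NotifyEos();
    mg->Stop();
    mg->Release();
    return true;
}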
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef METADATA_GENERATOR_VIDEO_COMMON_H +#define METADATA_GENERATOR_VIDEO_COMMON_H +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum MdgBufferFlag : uint32_t { + MDG_BUFFER_FLAG_NONE = 0, + /* This signals the end of stream */ + MDG_BUFFER_FLAG_EOS = 1 << 0, +}; + +class __attribute__((visibility("default"))) MetadataGeneratorVideoCallback { +public: + virtual ~MetadataGeneratorVideoCallback() = default; + /* * + * Called when an error occurred. + * + * @param errorCode Error code. + * @since 5.0 + */ + virtual void OnError(int32_t errorCode) = 0; + + /* * + * Called when an state changed. + * + * @param state current state. + * @since 5.0 + */ + virtual void OnState(int32_t state) = 0; + + /* * + * Called when an output buffer becomes available. + * + * @param index The index of the available output buffer. + * @since 5.0 + */ + virtual void OnOutputBufferAvailable(uint32_t index, MdgBufferFlag flag) = 0; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // METADATA_GENERATOR_VIDEO_COMMON_H diff --git a/interfaces/inner_api/video_refreshrate_prediction.h b/interfaces/inner_api/video_refreshrate_prediction.h new file mode 100644 index 0000000000000000000000000000000000000000..eca8d4dae7668ea03df5e9d3e436e0a07f5631bd --- /dev/null +++ b/interfaces/inner_api/video_refreshrate_prediction.h @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_INNER_API_VPE_VIDEO_REFRESHRATE_PREDICTION_H +#define INTERFACES_INNER_API_VPE_VIDEO_REFRESHRATE_PREDICTION_H + +#include + +#include "algorithm_errors.h" +#include "external_window.h" +#include "refbase.h" +#include "surface_buffer.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +enum MotionVectorType : int32_t { + MOTIONVECTOR_TYPE_NONE = 0, + MOTIONVECTOR_TYPE_AVC = 1, + MOTIONVECTOR_TYPE_HEVC = 2 +}; + +class __attribute__((visibility("default"))) VideoRefreshRatePrediction { +public: + /** + * @brief Create a VideoRefreshRatePrediction object. + * @syscap + * @return pointer of the VideoRefreshRatePrediction object. 
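A hypothetical implementation of the MetadataGeneratorVideoCallback interface above; holding the owner as a shared_ptr and the render decision on EOS are assumptions, not repository behavior.

#include <cstdint>
#include <memory>

#include "metadata_generator_video.h"
#include "metadata_generator_video_common.h"

using namespace OHOS::Media::VideoProcessingEngine;

class MdgCallbackSketch : public MetadataGeneratorVideoCallback {
public:
    explicit MdgCallbackSketch(std::shared_ptr<MetadataGeneratorVideo> owner) : owner_(std::move(owner)) {}

    void OnError(int32_t errorCode) override
    {
        // Surface the error to the application layer.
    }

    void OnState(int32_t state) override
    {
        // Track state transitions (e.g. started / stopped).
    }

    void OnOutputBufferAvailable(uint32_t index, MdgBufferFlag flag) override
    {
        // Hand the buffer back; `true` asks the engine to render it to the output surface.
        if (owner_ != nullptr) {
            owner_->ReleaseOutputBuffer(index, flag != MDG_BUFFER_FLAG_EOS);
        }
    }

private:
    std::shared_ptr<MetadataGeneratorVideo> owner_;
};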
+ * @since 13 + */ + static std::shared_ptr Create(); + + /** + * @brief 硬件LTPO支持校验,调用方检查返回值 + * @syscap + * @return 返回错误码VPEAlgoErrCode + * @since 13 + */ + virtual VPEAlgoErrCode CheckVRRSupport(std::string processName) = 0; + + /** + * @brief 执行视频可变帧率算法 + * @syscap + * @param input 输入的解码帧,算法将决策的帧率信息写入SurfaceBuffer的ExtraData中 + * @param videoFps 输入视频帧的帧率 + * @param codecType 输入视频帧编码格式 MotionVectorType + * @return 返回错误码VPEAlgoErrCode + * @since 13 + */ + virtual VPEAlgoErrCode Process(const sptr& input, int videoFps, int codecType) = 0; +protected: + virtual ~VideoRefreshRatePrediction() = default; +}; + +#ifdef __cplusplus +extern "C" { +#endif + +using VideoRefreshRatePredictionHandle = void; + +VideoRefreshRatePredictionHandle *VideoRefreshRatePredictionCreate(); +void VideoRefreshRatePredictionDestroy(VideoRefreshRatePredictionHandle *handle); +int32_t VideoRefreshRatePredictionCheckSupport(VideoRefreshRatePredictionHandle *handle, const char *processName); +void VideoRefreshRatePredictionProcess(VideoRefreshRatePredictionHandle *handle, + OH_NativeBuffer* inputImageNativeBuffer, int videoFps, int codecType); + +#ifdef __cplusplus +} +#endif + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // INTERFACES_INNER_API_VPE_VIDEO_REFRESHRATE_PREDICTION_H diff --git a/interfaces/kits/js/detail_enhance_napi.h b/interfaces/kits/js/detail_enhance_napi.h new file mode 100644 index 0000000000000000000000000000000000000000..793693a8af812f53ce3fb3bc4f2ef74cf5cdb750 --- /dev/null +++ b/interfaces/kits/js/detail_enhance_napi.h @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
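A usage sketch for VideoRefreshRatePrediction as declared above; the process name string is only a placeholder and Create() is assumed to return std::shared_ptr<VideoRefreshRatePrediction>.

#include "video_refreshrate_prediction.h"

using namespace OHOS;
using namespace OHOS::Media::VideoProcessingEngine;

bool PredictFrameRate(const sptr<SurfaceBuffer>& decodedFrame, int videoFps)
{
    auto vrr = VideoRefreshRatePrediction::Create();
    if (vrr == nullptr) {
        return false;
    }
    // LTPO support must be checked first; "com.example.player" is a placeholder process name.
    if (vrr->CheckVRRSupport("com.example.player") != VPE_ALGO_ERR_OK) {
        return false;
    }
    // The decided refresh rate is written into the SurfaceBuffer's ExtraData.
    return vrr->Process(decodedFrame, videoFps, MOTIONVECTOR_TYPE_HEVC) == VPE_ALGO_ERR_OK;
}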
+ */ + +#ifndef INTERFACES_KITS_JS_COMMON_INCLUDE_DETAIL_ENHANCE_NAPI_H +#define INTERFACES_KITS_JS_COMMON_INCLUDE_DETAIL_ENHANCE_NAPI_H + +#include "detail_enhancer_image.h" +#include "image_type.h" +#include "pixel_map_napi.h" + +namespace OHOS { +namespace Media { +class DetailEnhanceNapi { +public: + DetailEnhanceNapi(); + ~DetailEnhanceNapi(); + + static napi_value Process(napi_env env, napi_callback_info info); + static napi_value Init(napi_env env, napi_callback_info info); + static napi_value Destroy(napi_env env, napi_callback_info info); +private: + struct DetailEnhanceContext { + napi_env env; + napi_deferred deferred; + napi_ref callbackRef; + std::shared_ptr inputPixelMap = nullptr; + double xArg = 0; + double yArg = 0; + }; + struct NapiValues { + napi_status status; + napi_value thisVar = nullptr; + napi_value result = nullptr; + napi_value* argv = nullptr; + size_t argc; + std::unique_ptr context; + }; + + static ImageType ParserImageType(napi_env env, napi_value argv); + static bool PrepareNapiEnv(napi_env env, napi_callback_info info, struct NapiValues* nVal); + static napi_value DetailEnhanceImpl(napi_env env, std::unique_ptr& context); + static void SetDstPixelMapInfo(OHOS::Media::PixelMap &source, void* dstPixels, uint32_t dstPixelsSize, + std::unique_ptr& memory, OHOS::Media::PixelMap &dstPixelMap); + static bool AllocMemory(OHOS::Media::PixelMap &source, OHOS::Media::PixelMap &dstPixelMap); + static std::unique_ptr CreateDstPixelMap(OHOS::Media::PixelMap &source, + const OHOS::Media::InitializationOptions &opts); + static sptr GetSurfaceBufferFromDMAPixelMap( + const std::shared_ptr& pixelmap); +}; + +static std::shared_ptr mDetailEnh; +static std::mutex lock_{std::mutex()}; +} +} +#endif // INTERFACES_KITS_JS_COMMON_INCLUDE_DETAIL_ENHANCE_NAPI_H \ No newline at end of file diff --git a/interfaces/kits/js/detail_enhance_napi_formal.h b/interfaces/kits/js/detail_enhance_napi_formal.h new file mode 100644 index 0000000000000000000000000000000000000000..bf57a1dc5eba05742639254f090fe29685b0bbd1 --- /dev/null +++ b/interfaces/kits/js/detail_enhance_napi_formal.h @@ -0,0 +1,139 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INTERFACES_KITS_JS_COMMON_INCLUDE_DETAIL_ENHANCE_NAPI_FORMAL_H +#define INTERFACES_KITS_JS_COMMON_INCLUDE_DETAIL_ENHANCE_NAPI_FORMAL_H + +#include "detail_enhancer_image.h" +#include "contrast_enhancer_image.h" +#include "image_type.h" +#include "pixel_map_napi.h" + +namespace OHOS { +namespace Media { +class VpeNapi { +public: + VpeNapi() = default; + ~VpeNapi() = default; + + static napi_value Init(napi_env env, napi_value exports); + static napi_value Create(napi_env env, napi_callback_info info); + static napi_value EnhanceDetail(napi_env env, napi_callback_info info); + static napi_value EnhanceDetailSync(napi_env env, napi_callback_info info); + static napi_value SetDetailImage(napi_env env, napi_callback_info info); + static napi_value SetLcdImage(napi_env env, napi_callback_info info); + static napi_value RegisterCallback(napi_env env, napi_callback_info info); + static napi_value InitializeEnvironment(napi_env env, napi_callback_info info); + static napi_value DeinitializeEnvironment(napi_env env, napi_callback_info info); + +private: + struct DetailEnhanceContext { + napi_async_work asyncWork{}; // asynchronous work object + napi_deferred deferred{}; // Delayed execution object (used to return calculation results in Promise) + napi_ref callbackRef{}; // Reference object of callback func(used to return calculation results) + double xArg{}; + double yArg{}; + int32_t qualityLevel{}; + std::shared_ptr inputPixelMap{}; + std::shared_ptr outputPixelMap{}; + }; + + struct ContrastEnhanceContext { + napi_env env; + napi_async_work asyncWork{}; // asynchronous work object + napi_deferred deferred; + napi_ref callbackRef; + std::shared_ptr inputPixelMap{}; + std::shared_ptr scaledPixelMap{}; + std::shared_ptr lcdPixelMap{}; + sptr pazzleBuffer{}; + OHOS::Rect curPixelmapArea; + OHOS::Rect displayArea; + int pixelmapId = -1; + double fullRatio{};; // 传入的pixelmap 相对于原图的缩放比例 + int sharpnessIntensity{};; + int32_t contentId = -1; + int oriHeight{}; + int oriWidth{}; + unsigned int lcdHeight{}; + unsigned int lcdWidth{}; + bool isLocalDecoding{}; + napi_ref callbackFunc; + double defaultRatio{}; + bool isLCDLutFinished{}; + bool isCanceled{}; + bool genFinalEffect{}; + }; + + struct NapiValues { + napi_status status = napi_ok; + napi_value thisVar{}; + napi_value result{}; + napi_value* argv{}; + size_t argc = 0; + }; + + static thread_local napi_ref constructor_; + static thread_local napi_ref qualityLevelTypeRef_; + static thread_local std::shared_ptr contrastContext_; + static thread_local std::shared_ptr detailContext_; + + static bool PrepareNapiEnv(napi_env env, napi_callback_info info, NapiValues* nVal); + static bool ParseRect(napi_env env, napi_value nVal, OHOS::Rect& rect); + static bool ParseSize(napi_env env, napi_value nVal); + static std::shared_ptr PrepareDstPixelMap(napi_env env, VpeNapi::DetailEnhanceContext* context); + static ImageType ParserImageType(napi_env env, napi_value argv); + + static void SetDstPixelMapInfo(OHOS::Media::PixelMap& source, void* dstPixels, uint32_t dstPixelsSize, + std::unique_ptr& memory, OHOS::Media::PixelMap& dstPixelMap); + static bool AllocMemory(OHOS::Media::PixelMap& source, OHOS::Media::PixelMap& dstPixelMap, + const InitializationOptions& opt); + static bool ConvertPixelmapToSurfaceBuffer(const std::shared_ptr& pixelmap, + sptr& bufferImpl); + static std::unique_ptr CreateDstPixelMap(OHOS::Media::PixelMap& source, + const OHOS::Media::InitializationOptions& opts); + static sptr GetSurfaceBufferFromDMAPixelMap( + const 
std::shared_ptr& pixelmap); + static napi_value CreateEnumTypeObject(napi_env env, + napi_valuetype type, napi_ref* ref, std::vector& imageEnumMap); + + // detail enhancer + static bool InitDetailAlgo(napi_env env, int level); + static bool ConfigResolutionBasedOnRatio(napi_env env, napi_value& nVal, + std::shared_ptr context); + static bool ConfigResolution(napi_env env, napi_value& width, napi_value& height, + std::shared_ptr context); + static bool ParseDetailEnhanceParameter(napi_env env, napi_callback_info info); + static std::shared_ptr DetailEnhanceImpl(napi_env env, VpeNapi::DetailEnhanceContext* context); + + // contrast enhancer + static bool InitContrastAlgo(napi_env env); + static bool ParseLCDParameter(napi_env env, napi_callback_info info, NapiValues& nVal); + static bool ParseDetailImageParameter(napi_env env, napi_callback_info info, NapiValues& nVal); + static bool GenerateRegionHist(napi_env env, ContrastEnhanceContext* context); + static bool UpdateMetadataBasedOnLcd(ContrastEnhanceContext* context); + static bool UpdateMetadataBasedOnDetail(ContrastEnhanceContext* context); + static napi_value CallCallback(napi_env env, ContrastEnhanceContext* context); + + static napi_value Constructor(napi_env env, napi_callback_info info); + static void Destructor(napi_env env, void* nativeObject, void* finalize); + static void ThrowExceptionError(napi_env env, const int32_t errCode, const std::string errMsg); + static napi_value DoInitAfter(napi_env env, napi_value exports, napi_value constructor, size_t property_count, + const napi_property_descriptor* properties); + static std::vector RegisterNapi(); +}; +} +} +#endif // INTERFACES_KITS_JS_COMMON_INCLUDE_DETAIL_ENHANCE_NAPI_FORMAL_H \ No newline at end of file diff --git a/interfaces/kits/js/native_module_ohos_imageprocessing.cpp b/interfaces/kits/js/native_module_ohos_imageprocessing.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3c643fc8c1fa3ec585d30322eaee6b7791f4b37c --- /dev/null +++ b/interfaces/kits/js/native_module_ohos_imageprocessing.cpp @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
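To make the NAPI surface above concrete, here is a hypothetical sketch of how VpeNapi::Init might export a few of the declared methods; the property names and the descriptor list are assumptions, not the patch's actual implementation.

// Sketch only; belongs in the VpeNapi implementation file.
napi_value VpeNapi::Init(napi_env env, napi_value exports)
{
    // Hypothetical property table; the real code may also register a constructor and enum objects.
    static napi_property_descriptor props[] = {
        { "enhanceDetail", nullptr, EnhanceDetail, nullptr, nullptr, nullptr, napi_default, nullptr },
        { "enhanceDetailSync", nullptr, EnhanceDetailSync, nullptr, nullptr, nullptr, napi_default, nullptr },
        { "initializeEnvironment", nullptr, InitializeEnvironment, nullptr, nullptr, nullptr, napi_default, nullptr },
    };
    napi_define_properties(env, exports, sizeof(props) / sizeof(props[0]), props);
    return exports;
}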
+ */ + +#include "native_module_ohos_imageprocessing.h" + +#undef LOG_DOMAIN +#define LOG_DOMAIN LOG_TAG_DOMAIN_ID_IMAGE + +#undef LOG_TAG +#define LOG_TAG 0xD002B3F + +namespace OHOS { +namespace Media { +/* +* Function registering all props and functions of multimedia.videoProcessingEngine module +*/ +static napi_value Export(napi_env env, napi_value exports) +{ + VpeNapi::Init(env, exports); + return exports; +} + +/* + * module define + */ +static napi_module videoProcessingModule = { + .nm_version = 1, + .nm_flags = 0, + .nm_filename = nullptr, + .nm_register_func = Export, + .nm_modname = "multimedia.videoProcessingEngine", + .nm_priv = nullptr, + .reserved = {0}, +}; + +/* + * module register + */ +extern "C" __attribute__((constructor)) void VideoProcessingModule(void) +{ + napi_module_register(&videoProcessingModule); +} +} // namespace Media +} // namespace OHOS diff --git a/interfaces/kits/js/native_module_ohos_imageprocessing.h b/interfaces/kits/js/native_module_ohos_imageprocessing.h new file mode 100644 index 0000000000000000000000000000000000000000..918b667822c8be0fc86c3c8d88e48f0fd392e143 --- /dev/null +++ b/interfaces/kits/js/native_module_ohos_imageprocessing.h @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INTERFACES_KITS_JS_COMMON_INCLUDE_NATIVE_MODULE_OHOS_ENHANCE_H +#define INTERFACES_KITS_JS_COMMON_INCLUDE_NATIVE_MODULE_OHOS_ENHANCE_H + +#include "detail_enhance_napi_formal.h" + +#endif // INTERFACES_KITS_JS_COMMON_INCLUDE_NATIVE_MODULE_OHOS_ENHANCE_H \ No newline at end of file diff --git a/services/BUILD.gn b/services/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..81d6ac7ac091da735f67ea1ad5ed544a338f85f6 --- /dev/null +++ b/services/BUILD.gn @@ -0,0 +1,103 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/config/ohos/config.gni") +import("//build/ohos.gni") +import("//foundation/ability/idl_tool/idl_config.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +group("video_processing_service_group") { + deps = [ + ":videoprocessingservice", + "sa_profile:video_processing_service", + "sa_profile:video_processing_service_etc" + ] +} + +idl_interface_sources = [ + "${target_gen_dir}/video_processing_service_manager_proxy.cpp", + "${target_gen_dir}/video_processing_service_manager_stub.cpp", +] + +idl_gen_interface("videoprocessingservice_interface") { + src_idl = rebase_path("IVideoProcessingServiceManager.idl") + dst_file = string_join(",", idl_interface_sources) + log_domainid = "0x00010256" + log_tag = "VideoProcessingService" +} + +config("videoprocessingservice_config") { + visibility = [ ":*" ] + include_dirs = [ + "include", + "${target_gen_dir}", + ] + cflags = [] + if (target_cpu == "arm") { + cflags += [ "-DBINDER_IPC_32BIT" ] + } +} + +ohos_shared_library("videoprocessingservice") { + install_enable = true + + sanitize = { + boundary_sanitize = true + cfi = true + cfi_cross_dso = true + integer_overflow = true + ubsan = true + debug = false + } + + configs = [ ":videoprocessingservice_config" ] + output_values = get_target_outputs(":videoprocessingservice_interface") + sources = [ + "src/surface_buffer_info.cpp", + "src/video_processing_server.cpp", + "src/event_handler_factory.cpp", + ] + sources += filter_include(output_values, [ "*.cpp" ]) + defines = [ "AMS_LOG_TAG = \"VideoProcessingService\"" ] + + include_dirs = [ + "include", + "${target_gen_dir}", + "//foundation/multimedia/video_processing_engine/framework/dfx/include", + "//foundation/systemabilitymgr/safwk/services/safwk/include", + "//foundation/systemabilitymgr/samgr/interfaces/innerkits/samgr_proxy/include", + "//foundation/communication/ipc/interfaces/innerkits/ipc_core/include", + ] + + deps = [ + ":videoprocessingservice_interface", + ] + + external_deps = [ + "c_utils:utils", + "hilog:libhilog", + "hitrace:hitrace_meter", + "ipc:ipc_single", + "graphic_surface:surface", + "safwk:system_ability_fwk", + "samgr:samgr_proxy", + "eventhandler:libeventhandler", + ] + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} + +group("videoprocessingservice_idl_headers") { + public_configs = [ ":videoprocessingservice_config" ] + deps = [ ":videoprocessingservice_interface" ] +} \ No newline at end of file diff --git a/services/IVideoProcessingServiceManager.idl b/services/IVideoProcessingServiceManager.idl new file mode 100644 index 0000000000000000000000000000000000000000..6a62cab2c663a411c5e1df97170f0227d0f85aaa --- /dev/null +++ b/services/IVideoProcessingServiceManager.idl @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +sequenceable SurfaceBufferInfo..OHOS.SurfaceBufferInfo; +interface OHOS.IVideoProcessingServiceManager { + void LoadInfo([in] int key, [out] SurfaceBufferInfo bufferInfo); +} \ No newline at end of file diff --git a/services/include/event_handler_factory.h b/services/include/event_handler_factory.h new file mode 100644 index 0000000000000000000000000000000000000000..d19a3ed8226e53c98be97d3e8144efb335f5895f --- /dev/null +++ b/services/include/event_handler_factory.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef OHOS_VPE_EVENT_HANDLER_FACTORY_H +#define OHOS_VPE_EVENT_HANDLER_FACTORY_H + +#include +#include +#include +#include "event_handler.h" +#include "vpe_log.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class EventHandlerFactory { +public: + static EventHandlerFactory& GetInstance(); +private: + EventHandlerFactory(const EventHandlerFactory&) = delete; + EventHandlerFactory& operator= (const EventHandlerFactory&) = delete; + EventHandlerFactory(EventHandlerFactory&&) = delete; + EventHandlerFactory& operator= (EventHandlerFactory&&) = delete; +public: + int32_t Init(); + int32_t UnInit(); + std::shared_ptr CreateEventHandler(const std::string& handlerName); + +private: + EventHandlerFactory() = default; + virtual ~EventHandlerFactory() = default; + std::mutex eventHandlerMutex_; + std::map> eventHandlerMap_; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // OHOS_VPE_EVENT_HANDLER_FACTORY_H diff --git a/services/include/surface_buffer_info.h b/services/include/surface_buffer_info.h new file mode 100644 index 0000000000000000000000000000000000000000..3f595b9fdc737725dfe4ec54f09c768690aca378 --- /dev/null +++ b/services/include/surface_buffer_info.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SURFACE_BUFFER_INFO_H +#define SURFACE_BUFFER_INFO_H + +#include "parcel.h" +#include "surface_buffer.h" + +namespace OHOS { +struct SurfaceBufferInfo : public Parcelable { + sptr surfacebuffer; + + bool ReadFromParcel(Parcel &parcel); + virtual bool Marshalling(Parcel &parcel) const override; + static SurfaceBufferInfo *Unmarshalling(Parcel &parcel); +}; +} // namespace OHOS +#endif // SURFACE_BUFFER_INFO_H diff --git a/services/include/video_processing_client.h b/services/include/video_processing_client.h new file mode 100644 index 0000000000000000000000000000000000000000..8169cd17b3817f6eb541a569969f659efe163b2a --- /dev/null +++ b/services/include/video_processing_client.h @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VPE_VIDEO_PROCESSING_CLENT_H +#define VPE_VIDEO_PROCESSING_CLENT_H + +#include "surface_buffer_info.h" +#include "video_processing_service_manager_proxy.h" +#include "ipc_types.h" +#include "refbase.h" +#include "iremote_object.h" +#include "vpe_model_path.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class VideoProcessingManager { +public: + VideoProcessingManager(const VideoProcessingManager&) = delete; + VideoProcessingManager& operator=(const VideoProcessingManager&) = delete; + VideoProcessingManager(VideoProcessingManager&&) = delete; + VideoProcessingManager& operator=(VideoProcessingManager&&) = delete; + + static VideoProcessingManager& GetInstance(); + + /* + * @brief Initialize the client environment. + * + */ + void Connect(); + + /* + * @brief Clear the client environment. + * + */ + void Disconnect(); + + /* + * @brief Read file from system to pass surface buffer to VPE module. + * @param key + */ + ErrCode LoadInfo(int32_t key, SurfaceBufferInfo& bufferInfo); + + void LoadSystemAbilitySuccess(const sptr &remoteObject); + void LoadSystemAbilityFail(); + +private: + VideoProcessingManager() = default; + virtual ~VideoProcessingManager() = default; + sptr g_proxy = nullptr; + std::condition_variable g_proxyConVar; +}; +} +} +} +#endif // VPE_VIDEO_PROCESSING_CLENT_H \ No newline at end of file diff --git a/services/include/video_processing_load_callback.h b/services/include/video_processing_load_callback.h new file mode 100644 index 0000000000000000000000000000000000000000..c7813a4aed19858c228c15bb12b899d1cc5ff8fd --- /dev/null +++ b/services/include/video_processing_load_callback.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
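A sketch of how a caller might fetch a model buffer through the VideoProcessingManager declared above; interpreting `key` as one of the MODEL_PATHS indices from vpe_model_path.h is an assumption based on the include.

#include "video_processing_client.h"

using namespace OHOS;
using namespace OHOS::Media::VideoProcessingEngine;

bool FetchModelBuffer(int32_t key, sptr<SurfaceBuffer>& model)
{
    auto& manager = VideoProcessingManager::GetInstance();
    manager.Connect();                 // loads the video_processing_service SA on demand
    SurfaceBufferInfo info;
    if (manager.LoadInfo(key, info) != ERR_OK) {
        manager.Disconnect();
        return false;
    }
    model = info.surfacebuffer;        // the model blob is shared through this SurfaceBuffer
    manager.Disconnect();
    return true;
}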
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_PROCESSING_LOAD_CALLBACK_H +#define VIDEO_PROCESSING_LOAD_CALLBACK_H + +#include +#include + +#include "refbase.h" +#include "system_ability_load_callback_stub.h" + +namespace OHOS { class IRemoteObject; } +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class VideoProcessingLoadCallback : public SystemAbilityLoadCallbackStub { +public: + void OnLoadSystemAbilitySuccess(int32_t systemAbilityId, + const sptr &remoteObject) override; + void OnLoadSystemAbilityFail(int32_t systemAbilityId) override; +}; +} +} +} +#endif \ No newline at end of file diff --git a/services/include/video_processing_server.h b/services/include/video_processing_server.h new file mode 100644 index 0000000000000000000000000000000000000000..12c7c3b16121a3f94da35065c333d7ea2c9c7cb4 --- /dev/null +++ b/services/include/video_processing_server.h @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VPE_VIDEO_PROCESSING_SERVER_H +#define VPE_VIDEO_PROCESSING_SERVER_H + +#include + +#include +#include + +#include "video_processing_service_manager_stub.h" +#include "ipc_types.h" +#include "event_handler.h" +#include "event_runner.h" +#include "vpe_log.h" +#include "vpe_model_path.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class VideoProcessingServer : public SystemAbility, public VideoProcessingServiceManagerStub { + DECLARE_SYSTEM_ABILITY(VideoProcessingServer); + +public: + VideoProcessingServer(int32_t saId, bool runOnCreate); + ~VideoProcessingServer(); + + int32_t Init(); + int32_t UnInit(); + ErrCode LoadInfo(int32_t key, SurfaceBufferInfo& bufferInfo) override; + bool IsInited(); + +protected: + void OnStart() override; + void OnStop() override; + +private: + int32_t CreateUnloadHandler(); + int32_t DestroyUnloadHandler(); + void DelayUnloadTask(); + void UnloadVideoProcessingSA(); + +private: + std::shared_ptr unloadHandler_; + std::mutex unloadMutex_; + std::atomic isInited_{false}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // VPE_VIDEO_PROCESSING_SERVER_H \ No newline at end of file diff --git a/services/include/vpe_model_path.h b/services/include/vpe_model_path.h new file mode 100644 index 0000000000000000000000000000000000000000..63179087067673a24311a0d43117faadf764afb1 --- /dev/null +++ b/services/include/vpe_model_path.h @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef VIDEO_PROCESSING_SERVICE_VPE_MODEL_PATH_H +#define VIDEO_PROCESSING_SERVICE_VPE_MODEL_PATH_H +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum MODEL_PATHS { + AILIGHT_NORMAL = 0, + AILIGHT_STRONG, + AILIGHT_CLS, + AIHDR_PIC, + HDR_COMPOSE, + SDR_CONVERT, + HDR_DECOMPOSE, + IMAGE_AISR_ALGO_CONFIG, + IMAGE_SR_MODEL_0, + IMAGE_SR_MODEL_1, + IMAGE_SR_MODEL_2, + IMAGE_SR_MODEL_3, + IMAGE_SR_MODEL_4, + CALC_HIST_COMP_SHADER_0, + CALC_HIST_COMP_RGB_SHADER_1, + CALC_HIST_COMP_LIMIT_SHADER_1, + CALC_HIST_COMP_FULL_SHADER_1, + SDR2SDR_VIDEO_CONVERT_EBU_Y2Y, + SDR2SDR_VIDEO_CONVERT_EBU_Y2R, + SDR2SDR_VIDEO_CONVERT_EBU_R2Y, + SDR2SDR_VIDEO_CONVERT_EBU_R2R, + SDR2SDR_VIDEO_CONVERT_SMPTE_Y2Y, + SDR2SDR_VIDEO_CONVERT_SMPTE_Y2R, + SDR2SDR_VIDEO_CONVERT_SMPTE_R2Y, + SDR2SDR_VIDEO_CONVERT_SMPTE_R2R, + HDR2HDR_VIDEO_PQ2HLG_Y2Y, + HDR2HDR_VIDEO_PQ2HLG_Y2R, + HDR2HDR_VIDEO_PQ2HLG_R2Y, + HDR2HDR_VIDEO_PQ2HLG_R2R, + HDR2HDR_VIDEO_HLG2PQ_Y2Y, + HDR2HDR_VIDEO_HLG2PQ_Y2R, + HDR2HDR_VIDEO_HLG2PQ_R2Y, + HDR2HDR_VIDEO_HLG2PQ_R2R, + HDR2SDR_VIDEO_CONVERT_HLG_Y2Y, + HDR2SDR_VIDEO_CONVERT_HLG_Y2R, + HDR2SDR_VIDEO_CONVERT_HLG_R2Y, + HDR2SDR_VIDEO_CONVERT_HLG_R2R, + HDR2SDR_VIDEO_CONVERT_HLG_Y2Y_DEFAULT, + HDR2SDR_VIDEO_CONVERT_HLG_Y2R_DEFAULT, + HDR2SDR_VIDEO_CONVERT_HLG_R2Y_DEFAULT, + HDR2SDR_VIDEO_CONVERT_HLG_R2R_DEFAULT, + HDR2SDR_VIDEO_CONVERT_PQ_Y2Y, + HDR2SDR_VIDEO_CONVERT_PQ_Y2R, + HDR2SDR_VIDEO_CONVERT_PQ_R2Y, + HDR2SDR_VIDEO_CONVERT_PQ_R2R, + VIDEO_AISR_ALGO_CONFIG, + VIDEO_SR_MODEL_0, + VIDEO_SR_MODEL_1, + VIDEO_SR_MODEL_2, + VIDEO_SR_MODEL_3, + VIDEO_SR_MODEL_4, + VIDEO_SR_MODEL_5, + VIDEO_SR_MODEL_6, + VIDEO_SR_MODEL_7, + VIDEO_SR_BILINEAR_CL_KERNEL, + VIDEO_SR_BICUBIC_CL_KERNEL, + VIDEO_SR_SR_CL_KERNEL, + EVE_10bit_SCALE_KERNEL, + VIDEO_AIHDR_ALGO_CONFIG, + VIDEO_AIHDR_MODEL, + VPEMODEL_PATHS_LENGTH, +}; + +const std::array VPE_MODEL_PATHS = { + "/sys_prod/etc/VideoProcessingEngine/AILIGHT_normal.omc", + "/sys_prod/etc/VideoProcessingEngine/AILIGHT_strong.omc", + "/sys_prod/etc/VideoProcessingEngine/AILIGHT_cls.omc", + "/sys_prod/etc/VideoProcessingEngine/aihdr_pic.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr_compose.bin", + "/sys_prod/etc/VideoProcessingEngine/sdr_convert.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr_decompose.bin", + "/sys_prod/etc/VideoProcessingEngine/image_aisr_algo_config.xml", + "/sys_prod/etc/VideoProcessingEngine/Image_SR_Model_576x576_20240402.omc", + "/sys_prod/etc/VideoProcessingEngine/Image_SR_Model_848x1104_20240402.omc", + "/sys_prod/etc/VideoProcessingEngine/Image_SR_Model_1104x1488_20240402.omc", + "/sys_prod/etc/VideoProcessingEngine/Image_SR_Model_1488x1104_20240402.omc", + "/sys_prod/etc/VideoProcessingEngine/Image_SR_Model_1872x1360_20240402.omc", + "/sys_prod/etc/VideoProcessingEngine/calcHistCompShader0.bin", + "/sys_prod/etc/VideoProcessingEngine/calcHistCompRgbShader1.bin", + "/sys_prod/etc/VideoProcessingEngine/calcHistCompLimitShader1.bin", + "/sys_prod/etc/VideoProcessingEngine/calcHistCompFullShader1.bin", + "/sys_prod/etc/VideoProcessingEngine/sdr2sdr_video_convert_ebu_y2y.bin", + 
"/sys_prod/etc/VideoProcessingEngine/sdr2sdr_video_convert_ebu_y2r.bin", + "/sys_prod/etc/VideoProcessingEngine/sdr2sdr_video_convert_ebu_r2y.bin", + "/sys_prod/etc/VideoProcessingEngine/sdr2sdr_video_convert_ebu_r2r.bin", + "/sys_prod/etc/VideoProcessingEngine/sdr2sdr_video_convert_smpte_y2y.bin", + "/sys_prod/etc/VideoProcessingEngine/sdr2sdr_video_convert_smpte_y2r.bin", + "/sys_prod/etc/VideoProcessingEngine/sdr2sdr_video_convert_smpte_r2y.bin", + "/sys_prod/etc/VideoProcessingEngine/sdr2sdr_video_convert_smpte_r2r.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2hdr_video_pq2hlg_y2y.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2hdr_video_pq2hlg_y2r.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2hdr_video_pq2hlg_r2y.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2hdr_video_pq2hlg_r2r.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2hdr_video_hlg2pq_y2y.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2hdr_video_hlg2pq_y2r.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2hdr_video_hlg2pq_r2y.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2hdr_video_hlg2pq_r2r.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_hlg_y2y.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_hlg_y2r.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_hlg_r2y.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_hlg_r2r.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_hlg_y2y_default.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_hlg_y2r_default.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_hlg_r2y_default.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_hlg_r2r_default.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_pq_y2y.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_pq_y2r.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_pq_r2y.bin", + "/sys_prod/etc/VideoProcessingEngine/hdr2sdr_video_pq_r2r.bin", + "/sys_prod/etc/VideoProcessingEngine/video_aisr_ohos_config.xml", + "/sys_prod/etc/VideoProcessingEngine/Video_SR_Model_576x576_2x.omc", + "/sys_prod/etc/VideoProcessingEngine/Video_SR_Model_576x1024_2x.omc", + "/sys_prod/etc/VideoProcessingEngine/Video_SR_Model_1024x576_2x.omc", + "/sys_prod/etc/VideoProcessingEngine/Video_SR_Model_768x1280_2x.omc", + "/sys_prod/etc/VideoProcessingEngine/Video_SR_Model_1280x768_2x.omc", + "/sys_prod/etc/VideoProcessingEngine/Video_SR_Model_1088x1920_1x.omc", + "/sys_prod/etc/VideoProcessingEngine/Video_SR_Model_1920x1088_1x.omc", + "/sys_prod/etc/VideoProcessingEngine/Video_SR_Model_1920x1920_1x.omc", + "/sys_prod/etc/VideoProcessingEngine/bilinear.bin", + "/sys_prod/etc/VideoProcessingEngine/bicubic.bin", + "/sys_prod/etc/VideoProcessingEngine/sr.bin", + "/sys_prod/etc/VideoProcessingEngine/10bitProcess.bin", + "/sys_prod/etc/VideoProcessingEngine/video_aihdr_algo_config.xml", + "/sys_prod/etc/VideoProcessingEngine/GTM_AIHDR.omc", +}; + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // VIDEO_PROCESSING_SERVICE_VPE_MODEL_PATH_H \ No newline at end of file diff --git a/services/sa_profile/66134.json b/services/sa_profile/66134.json new file mode 100644 index 0000000000000000000000000000000000000000..65a02367c21db8e8be2200354cd6dd8b0357bc5f --- /dev/null +++ b/services/sa_profile/66134.json @@ -0,0 +1,16 @@ +{ + "process": "video_processing_service", + "systemability": [ + { + "name": 66134, + "libpath": "libvideoprocessingservice.z.so", + "run-on-create": false, + "auto-restart": true, + "distributed": false, + 
"start-on-demand":{ + }, + "stop-on-demand":{ + } + } + ] +} \ No newline at end of file diff --git a/services/sa_profile/BUILD.gn b/services/sa_profile/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..d0d9a5ea3fd9f8dfafd37482b4a4f597802a7696 --- /dev/null +++ b/services/sa_profile/BUILD.gn @@ -0,0 +1,27 @@ +# Copyright (c) 2025 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/ohos.gni") +import("//build/ohos/sa_profile/sa_profile.gni") + +ohos_sa_profile("video_processing_service") { + sources = [ "66134.json" ] + part_name = "video_processing_engine" +} + +ohos_prebuilt_etc("video_processing_service_etc") { + source = "video_processing_service.cfg" + relative_install_dir = "init" + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/services/sa_profile/video_processing_service.cfg b/services/sa_profile/video_processing_service.cfg new file mode 100644 index 0000000000000000000000000000000000000000..c08775678c222221fa396409633d6273c1c2072b --- /dev/null +++ b/services/sa_profile/video_processing_service.cfg @@ -0,0 +1,23 @@ +{ + "jobs" : [ + { + "name" : "early-boot", + "cmds" : [ + "mkdir /data/service/el1/public/videoprocessingservice 0711 media media" + ] + } + ], + "services": [ + { + "name": "video_processing_service", + "path": [ + "/system/bin/sa_main", + "/system/profile/video_processing_service.json" + ], + "uid": "media", + "gid": ["system"], + "ondemand": true, + "secon" : "u:r:video_processing_service:s0" + } + ] +} \ No newline at end of file diff --git a/services/src/event_handler_factory.cpp b/services/src/event_handler_factory.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c343ff7df7570e67ac93246b994404bb3c2b750a --- /dev/null +++ b/services/src/event_handler_factory.cpp @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2023-2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "event_handler_factory.h"
+#include "vpe_log.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+const int HANDLER_NAME_MAX_LENGTH = 4096;
+
+EventHandlerFactory& EventHandlerFactory::GetInstance()
+{
+    static EventHandlerFactory instance;
+    return instance;
+}
+
+int32_t EventHandlerFactory::Init()
+{
+    VPE_LOGI("EventHandlerFactory Init!");
+    auto unloadRunner = AppExecFwk::EventRunner::Create("unload_vpe_sa_handler");
+    std::lock_guard<std::mutex> lock(eventHandlerMutex_);
+    eventHandlerMap_["unload_vpe_sa_handler"] = std::make_shared<AppExecFwk::EventHandler>(unloadRunner);
+    return ERR_OK;
+}
+
+int32_t EventHandlerFactory::UnInit()
+{
+    VPE_LOGI("EventHandlerFactory UnInit");
+    std::lock_guard<std::mutex> lock(eventHandlerMutex_);
+    eventHandlerMap_.clear();
+    return ERR_OK;
+}
+
+std::shared_ptr<AppExecFwk::EventHandler> EventHandlerFactory::CreateEventHandler(const std::string& handlerName)
+{
+    if (handlerName.empty() || handlerName.size() > HANDLER_NAME_MAX_LENGTH) {
+        VPE_LOGE("handlerName is invalid!");
+        return nullptr;
+    }
+    VPE_LOGI("CreateEventHandler, handlerName: %{public}s", handlerName.c_str());
+    std::lock_guard<std::mutex> lock(eventHandlerMutex_);
+    return eventHandlerMap_[handlerName];
+}
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
\ No newline at end of file
diff --git a/services/src/surface_buffer_info.cpp b/services/src/surface_buffer_info.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..e5227a8e36e382024b6786a4990919bb6e432bc0
--- /dev/null
+++ b/services/src/surface_buffer_info.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
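The factory above supplies the handler used to delay-unload the service; a sketch of posting work on it, assuming the standard AppExecFwk::EventHandler::PostTask overload and an illustrative timeout value.

using namespace OHOS::Media::VideoProcessingEngine;

void ScheduleUnloadSketch()
{
    auto handler = EventHandlerFactory::GetInstance().CreateEventHandler("unload_vpe_sa_handler");
    if (handler == nullptr) {
        return;
    }
    constexpr int64_t delayMs = 10 * 1000;   // assumed idle timeout before unloading the SA
    handler->PostTask([]() {
        // The server would call its UnloadVideoProcessingSA() equivalent here.
    }, "UnloadVideoProcessingSA", delayMs);
}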
+ */
+
+#include "surface_buffer_info.h"
+
+#include "message_parcel.h"
+#include "vpe_log.h"
+
+namespace OHOS {
+bool SurfaceBufferInfo::ReadFromParcel(Parcel &parcel)
+{
+    auto messageParcel = static_cast<MessageParcel *>(&parcel);
+    if (surfacebuffer == nullptr) {
+        VPE_LOGE("surfacebuffer is null!");
+        return false;
+    }
+    auto status = surfacebuffer->ReadFromMessageParcel(*messageParcel);
+    if (status != GSERROR_OK) {
+        return false;
+    }
+    return true;
+}
+
+bool SurfaceBufferInfo::Marshalling(Parcel &parcel) const
+{
+    if (surfacebuffer == nullptr) {
+        VPE_LOGE("surfacebuffer is null!");
+        return false;
+    }
+    auto messageParcel = static_cast<MessageParcel *>(&parcel);
+    auto status = surfacebuffer->WriteToMessageParcel(*messageParcel);
+    if (status != GSERROR_OK) {
+        return false;
+    }
+    return true;
+}
+
+SurfaceBufferInfo *SurfaceBufferInfo::Unmarshalling(Parcel &parcel)
+{
+    SurfaceBufferInfo *info = new (std::nothrow) SurfaceBufferInfo();
+    if (info == nullptr) {
+        return nullptr;
+    }
+    info->surfacebuffer = SurfaceBuffer::Create();
+    if (!info->ReadFromParcel(parcel)) {
+        delete info;
+        return nullptr;
+    }
+
+    return info;
+}
+} // namespace OHOS
\ No newline at end of file
diff --git a/services/src/video_processing_client.cpp b/services/src/video_processing_client.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..1d02a7e68b20e128a9e90c09df36614ed9773e90
--- /dev/null
+++ b/services/src/video_processing_client.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2024 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
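A small round-trip sketch of the SurfaceBufferInfo parcelable above; the IPC layer performs the equivalent steps internally.

#include <memory>

#include "message_parcel.h"
#include "surface_buffer_info.h"

bool RoundTripSketch()
{
    OHOS::SurfaceBufferInfo info;
    info.surfacebuffer = OHOS::SurfaceBuffer::Create();
    OHOS::MessageParcel parcel;
    if (!info.Marshalling(parcel)) {   // writes the buffer into the parcel
        return false;
    }
    // Unmarshalling re-creates the SurfaceBuffer and reads it back from the parcel.
    std::unique_ptr<OHOS::SurfaceBufferInfo> out(OHOS::SurfaceBufferInfo::Unmarshalling(parcel));
    return out != nullptr;
}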
+ */ + +#include "video_processing_client.h" + +#include + +#include "video_processing_service_manager_proxy.h" +#include "iservice_registry.h" +#include "surface_buffer.h" +#include "event_handler.h" +#include "event_runner.h" +#include "video_processing_load_callback.h" +#include "vpe_log.h" + +namespace OHOS { class IRemoteObject; } +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace { +std::mutex g_proxyLock; +const int32_t VIDEO_PROCESSING_SERVER_SA_ID = 0x00010256; +const int LOADSA_TIMEOUT_MS = 1000; +} + +VideoProcessingManager& VideoProcessingManager::GetInstance() +{ + static VideoProcessingManager instance; + return instance; +} + +void VideoProcessingManager::Connect() +{ + std::unique_lock lock(g_proxyLock); + g_proxy = nullptr; + auto samgr = SystemAbilityManagerClient::GetInstance().GetSystemAbilityManager(); + CHECK_AND_RETURN_LOG(samgr != nullptr, "GetSystemAbilityManager return null"); + VPE_LOGD("GetSystemAbilityManager return is not null, success"); + auto object = samgr->CheckSystemAbility(VIDEO_PROCESSING_SERVER_SA_ID); + if (object != nullptr) { + VPE_LOGD("object is not null, get service succeed"); + g_proxy = iface_cast(object); + return; + } + VPE_LOGD("object is null, Start load SA"); + sptr loadCallback = new VideoProcessingLoadCallback(); + CHECK_AND_RETURN_LOG(loadCallback != nullptr, "New VideoProcessingLoadCallback fail!"); + int32_t ret = samgr->LoadSystemAbility(VIDEO_PROCESSING_SERVER_SA_ID, loadCallback); + CHECK_AND_RETURN_LOG(ret == ERR_OK, "LoadSystemAbility %{public}d failed!", VIDEO_PROCESSING_SERVER_SA_ID); + VPE_LOGD("VideoProcessingService SA load start!"); + auto waitStatus = g_proxyConVar.wait_for(lock, std::chrono::milliseconds(LOADSA_TIMEOUT_MS), + [this]() { return g_proxy != nullptr; }); + VPE_LOGD("VideoProcessingService SA load end!"); + if (!waitStatus) { + VPE_LOGE("VideoProcessingService SA load timeout!"); + return; + } + if (waitStatus && g_proxy != nullptr) { + VPE_LOGI("Load VideoProcessingService success"); + } + return; +} + +void VideoProcessingManager::Disconnect() +{ + VPE_LOGD("VideoProcessingManager Disconnect!"); + return; +} + +ErrCode VideoProcessingManager::LoadInfo(int32_t key, SurfaceBufferInfo& bufferInfo) +{ + std::lock_guard lock(g_proxyLock); + CHECK_AND_RETURN_RET_LOG(g_proxy != nullptr, ERR_NULL_OBJECT, "LoadInfo: g_proxy is nullptr!"); + return g_proxy->LoadInfo(key, bufferInfo); +} + +void VideoProcessingManager::LoadSystemAbilitySuccess(const sptr &remoteObject) +{ + VPE_LOGI("Get VideoProcessingService SA success!"); + std::unique_lock lock(g_proxyLock); + if (remoteObject != nullptr) { + VPE_LOGD("remoteObject is not null."); + g_proxy = iface_cast(remoteObject); + g_proxyConVar.notify_one(); + } +} + +void VideoProcessingManager::LoadSystemAbilityFail() +{ + VPE_LOGE("Get VideoProcessingService SA failed!"); + std::unique_lock lock(g_proxyLock); + g_proxy = nullptr; +} +} +} +} \ No newline at end of file diff --git a/services/src/video_processing_load_callback.cpp b/services/src/video_processing_load_callback.cpp new file mode 100644 index 0000000000000000000000000000000000000000..19a6c52958961c7e2c5de8583061d890baf8ce30 --- /dev/null +++ b/services/src/video_processing_load_callback.cpp @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "video_processing_load_callback.h" + +#include "video_processing_client.h" +#include "vpe_log.h" + +namespace OHOS { class IRemoteObject; } +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +void VideoProcessingLoadCallback::OnLoadSystemAbilitySuccess( + int32_t systemAbilityId, const sptr &remoteObject) +{ + VideoProcessingManager::GetInstance().LoadSystemAbilitySuccess(remoteObject); + VPE_LOGI("SA ID: %{public}d, on load SA success", systemAbilityId); +} + +void VideoProcessingLoadCallback::OnLoadSystemAbilityFail(int32_t systemAbilityId) +{ + VideoProcessingManager::GetInstance().LoadSystemAbilityFail(); + VPE_LOGI("SA ID: %{public}d, on load SA failed", systemAbilityId); +} +} +} +} \ No newline at end of file diff --git a/services/src/video_processing_server.cpp b/services/src/video_processing_server.cpp new file mode 100644 index 0000000000000000000000000000000000000000..713a4ef8f714b209f0aca7b1f2f1a16beada4b1f --- /dev/null +++ b/services/src/video_processing_server.cpp @@ -0,0 +1,204 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "video_processing_server.h" + +#include +#include +#include +#include +#include +#include + +#include "surface_buffer.h" +#include "event_handler_factory.h" +#include "vpe_log.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace { + const int VPE_INFO_FILE_MAX_LENGTH = 20485760; + const int32_t VIDEO_PROCESSING_SERVER_SA_ID = 0x00010256; + const std::string UNLOAD_TASK_ID = "unload_vpe_svr"; + constexpr int32_t DELAY_TIME = 180000; + REGISTER_SYSTEM_ABILITY_BY_ID(VideoProcessingServer, VIDEO_PROCESSING_SERVER_SA_ID, false); +} + +VideoProcessingServer::VideoProcessingServer(int32_t saId, bool runOnCreate) : SystemAbility(saId, runOnCreate) +{ + VPE_LOGD("VideoProcessingServer construct!"); +} +VideoProcessingServer::~VideoProcessingServer() +{ + VPE_LOGD("VideoProcessingServer destruction!"); + UnInit(); +} + +int32_t VideoProcessingServer::Init() +{ + VPE_LOGD("VideoProcessingServer init begin"); + if (EventHandlerFactory::GetInstance().Init() != ERR_NONE) { + VPE_LOGE("EventHandlerFactory init failed"); + return ERR_NULL_OBJECT; + } + VPE_LOGD("VideoProcessingServer init finish"); + return ERR_NONE; +} + +bool VideoProcessingServer::IsInited() +{ + return isInited_; +} + +int32_t VideoProcessingServer::UnInit() +{ + isInited_ = false; + if (EventHandlerFactory::GetInstance().UnInit() != ERR_NONE) { + VPE_LOGE("EventHandlerFactory UnInit failed"); + return ERR_NULL_OBJECT; + } + VPE_LOGD("VideoProcessingServer uninit succeeded"); + return ERR_NONE; +} + +int32_t VideoProcessingServer::CreateUnloadHandler() +{ + std::lock_guard lock(unloadMutex_); + if (unloadHandler_ == nullptr) { + unloadHandler_ = EventHandlerFactory::GetInstance().CreateEventHandler("unload_vpe_sa_handler"); + } + if (unloadHandler_ == nullptr) { + VPE_LOGE("UnloadHandler is nullptr!"); + return ERR_NULL_OBJECT; + } + return ERR_NONE; +} + +int32_t VideoProcessingServer::DestroyUnloadHandler() +{ + std::lock_guard lock(unloadMutex_); + if (unloadHandler_ == nullptr) { + VPE_LOGE("UnloadHandler is nullptr!"); + return ERR_NULL_OBJECT; + } + unloadHandler_->RemoveTask(UNLOAD_TASK_ID); + unloadHandler_ = nullptr; + return ERR_NONE; +} + +ErrCode VideoProcessingServer::LoadInfo(int32_t key, SurfaceBufferInfo& bufferInfo) +{ + if (key < 0 || key >= VPEMODEL_PATHS_LENGTH) { + VPE_LOGE("Input key %{public}d is invalid!", key); + UnloadVideoProcessingSA(); + return ERR_INVALID_DATA; + } + std::string path = VPE_MODEL_PATHS[key]; + VPE_LOGD("LoadInfoForVpe %{public}s", path.c_str()); + bufferInfo.surfacebuffer = SurfaceBuffer::Create(); + if (bufferInfo.surfacebuffer == nullptr) { + VPE_LOGE("Create surface buffer failed"); + UnloadVideoProcessingSA(); + return ERR_NULL_OBJECT; + } + std::unique_ptr fileStream = std::make_unique(path, std::ios::binary); + if (!fileStream->is_open()) { + VPE_LOGE("file is not open %{public}s", path.c_str()); + UnloadVideoProcessingSA(); + return ERR_NULL_OBJECT; + } + fileStream->seekg(0, std::ios::end); + int fileLength = fileStream->tellg(); + fileStream->seekg(0, std::ios::beg); + if (fileLength < 0 || fileLength > VPE_INFO_FILE_MAX_LENGTH) { + VPE_LOGE("fileLength %{public}d is too short or too long!", fileLength); + UnloadVideoProcessingSA(); + return ERR_INVALID_DATA; + } + + BufferRequestConfig inputCfg; + inputCfg.width = fileLength; + inputCfg.height = 1; + VPE_LOGD("FileLength: %{public}d", fileLength); + inputCfg.strideAlignment = fileLength; + inputCfg.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA; + 
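+    // The model file is packed into a one-row surface buffer: width and stride
+    // alignment are set to the file length so the whole file can be read into
+    // the buffer's CPU-mapped memory below.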
inputCfg.format = GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + inputCfg.timeout = 0; + GSError err = bufferInfo.surfacebuffer->Alloc(inputCfg); + if (err != GSERROR_OK) { + VPE_LOGE("Alloc surface buffer failed"); + UnloadVideoProcessingSA(); + return ERR_INVALID_DATA; + } + fileStream->read(reinterpret_cast(bufferInfo.surfacebuffer->GetVirAddr()), fileLength); + fileStream->close(); + UnloadVideoProcessingSA(); + return ERR_NONE; +} + +void VideoProcessingServer::UnloadVideoProcessingSA() +{ + if (CreateUnloadHandler() == ERR_NONE) { + VPE_LOGI("CreateUnloadHandler success!"); + DelayUnloadTask(); + } else { + return; + } + VPE_LOGD("Start/Update Delay Time Unload VPE SA!"); + return; +} + +void VideoProcessingServer::DelayUnloadTask() +{ + VPE_LOGD("delay unload task begin"); + auto task = []() { + VPE_LOGD("do unload task"); + auto samgrProxy = SystemAbilityManagerClient::GetInstance().GetSystemAbilityManager(); + if (samgrProxy == nullptr) { + VPE_LOGE("get samgr failed"); + return; + } + int32_t unloadResult = samgrProxy->UnloadSystemAbility(VIDEO_PROCESSING_SERVER_SA_ID); + if (unloadResult != ERR_OK) { + VPE_LOGE("remove system ability failed"); + return; + } + VPE_LOGI("kill VPE service success!"); + }; + unloadHandler_->RemoveTask(UNLOAD_TASK_ID); + VPE_LOGD("delay unload task post task"); + unloadHandler_->PostTask(task, UNLOAD_TASK_ID, DELAY_TIME); +} + +void VideoProcessingServer::OnStart() +{ + VPE_LOGD("VPE SA Onstart!"); + Init(); + Publish(this); +} + +void VideoProcessingServer::OnStop() +{ + VPE_LOGD("VPE SA Onstop!"); + DestroyUnloadHandler(); + if (UnInit() != ERR_NONE) { + VPE_LOGE("Uninit failed"); + return; + } +} +} +} +} \ No newline at end of file diff --git a/test/BUILD.gn b/test/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..fc8b8799693fa4fc9faf1f830c39aab4bb65d9bc --- /dev/null +++ b/test/BUILD.gn @@ -0,0 +1,91 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
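+# The groups below aggregate demo, unit and module test targets; each group is
+# gated by a corresponding flag declared in declare_args().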
+ +import("//build/ohos.gni") + +declare_args() { + vpe_support_demo_test = true + vpe_support_unit_test = true + vpe_support_module_test = true + cscv_support_demo_test = true + cscv_support_unit_test = true + video_support_demo_test = true + image_processing_unit_test = true + detail_enhancer_video_unit_test = true + detail_enhancer_unit_test = true + detail_enhancer_video_ndk_unit_test = true + colorSpace_converter_video_ndk_unit_test = true + metadata_gen_video_ndk_unit_test = true + video_variable_refreshrate_unit_test = true + aihdr_enhancer_video_unit_test = true + aihdr_enhancer_unit_test = true +} + +group("demo_test") { + testonly = true + deps = [] + if (vpe_support_demo_test) { + deps += [ "nativedemo/vpe_demo:vpe_demo" ] + } +} + +group("unit_test") { + testonly = true + deps = [] + if (vpe_support_unit_test) { + deps += [ "unittest/vpe_framework:vpe_framework_unit_test" ] + } + if (image_processing_unit_test) { + deps += [ "unittest/image_processing:image_processing_unit_test" ] + } + if (detail_enhancer_unit_test) { + deps += [ "unittest/detail_enhancer:detail_enhancer_unit_test" ] + } + if (detail_enhancer_video_unit_test) { + deps += [ "unittest/detail_enhancer_video:detail_enhancer_video_unit_test" ] + } + if (detail_enhancer_video_ndk_unit_test) { + deps += [ "unittest/detail_enhancer_video_ndk:detail_enhancer_video_ndk_unit_test" ] + } + if (cscv_support_unit_test) { + deps += [ "unittest/colorspace_converter_video:colorspace_converter_video_unit_test" ] + } + if (colorSpace_converter_video_ndk_unit_test) { + deps += [ "unittest/colorSpace_converter_video_ndk:colorSpace_converter_video_ndk_unit_test" ] + } + if (metadata_gen_video_ndk_unit_test) { + deps += [ "unittest/metadata_gen_video_ndk:metadata_gen_video_ndk_unit_test" ] + } + if (video_variable_refreshrate_unit_test) { + deps += [ "unittest/video_variable_refreshrate_test:video_variable_refreshrate_unit_test" ] + } + if (aihdr_enhancer_video_unit_test) { + deps += [ "unittest/aihdr_enhancer_video:aihdr_enhancer_video_unit_test" ] + } + if (aihdr_enhancer_unit_test) { + deps += [ "unittest/aihdr_enhancer_video:aihdr_enhancer_video_unit_test" ] + } +} + +group("module_test") { + testonly = true + deps = [] + if (vpe_support_module_test) { + deps += [ + "moduletest/colorspace_converter:colorspace_converter_module_test", + "moduletest/metadata_generator:metadata_generator_module_test", + "moduletest/colorspace_converter_video:colorspace_converter_video_module_test", + "moduletest/metadata_generator_video:metadata_generator_video_module_test", + ] + } +} diff --git a/test/moduletest/colorspace_converter/BUILD.gn b/test/moduletest/colorspace_converter/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..ce20fe5851b8ae5567c0b93041e37d4792b6cd7b --- /dev/null +++ b/test/moduletest/colorspace_converter/BUILD.gn @@ -0,0 +1,53 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
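+# Module test binary for the colorspace converter; it links the VPE framework
+# library and the ColorSpaceConverter sample utilities used by csc_module_test.cpp.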
+ +import("//build/ohos.gni") +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_moduletest("colorspace_converter_module_test") { + module_out_path = MODULE_TEST_OUTPUT_PATH + + include_dirs = [ + ".", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/ColorSpaceConverter/sample" + ] + + defines = [] + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + cflags_cc = cflags + cflags_cc += [ "-std=c++17" ] + + sources = [ + "csc_module_test.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$TEST_UTILS_PATH/ColorSpaceConverter/sample:csc_test_utils" + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "drivers_interface_display:display_commontype_idl_headers", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/moduletest/colorspace_converter/csc_module_test.cpp b/test/moduletest/colorspace_converter/csc_module_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f2de8d9ed2c719470278dcc2d37c57d99702c57b --- /dev/null +++ b/test/moduletest/colorspace_converter/csc_module_test.cpp @@ -0,0 +1,414 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include "csc_sample.h" +#include "csc_sample_define.h" +#include "refbase.h" + +using namespace std; +using namespace testing::ext; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +using namespace CSCSampleDefine; +class CSCModuleTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp() {}; + void TearDown() {}; + + void SetParameter(std::shared_ptr plugin); + sptr PrepareOneFrame(); +}; + +void CSCModuleTest::SetParameter(std::shared_ptr plugin) +{ + ColorSpaceConverterParameter parameterSet; + parameterSet.renderIntent = RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC; + int32_t ret = plugin->SetParameter(parameterSet); + ASSERT_EQ(VPE_ALGO_ERR_OK, ret); +} + +sptr CSCModuleTest::PrepareOneFrame() +{ + std::unique_ptr metadataFile = + std::make_unique(METADATA_FILE.data(), std::ios::binary | std::ios::in); + std::unique_ptr yuvFile = + std::make_unique(YUV_FILE.data(), std::ios::binary | std::ios::in); + + auto buffer = CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + if (buffer == nullptr) { + return nullptr; + } + SetMeatadata(buffer, INPUT_COLORSPACE_INFO); + SetMeatadata(buffer, CM_VIDEO_HDR_VIVID); + SetMeatadata(buffer, metadataFile); + ReadYuvFile(buffer, yuvFile, ONE_FRAME_SIZE); + + return buffer; +} + +/** + * @tc.number : 0101 + * @tc.func : Create + * @tc.desc : Test for ColorSpaceConverter Create + */ +HWTEST_F(CSCModuleTest, Create_0101, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); +} + +/** + * @tc.number : 0201 + * @tc.func : SetParameter + * @tc.desc : parameter.renderIntent != RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC + */ +HWTEST_F(CSCModuleTest, SetParameter_0201, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + ColorSpaceConverterParameter parameterSet; + parameterSet.renderIntent = RenderIntent::RENDER_INTENT_PERCEPTUAL; + int32_t ret = plugin->SetParameter(parameterSet); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0202 + * @tc.func : SetParameter + * @tc.desc : parameter.renderIntent = RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC + */ +HWTEST_F(CSCModuleTest, SetParameter_0202, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); +} + +/** + * @tc.number : 0301 + * @tc.func : GetParameter + * @tc.desc : Call after Create + */ +HWTEST_F(CSCModuleTest, GetParameter_0301, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + ColorSpaceConverterParameter parameterGet; + int32_t ret = plugin->GetParameter(parameterGet); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0302 + * @tc.func : GetParameter + * @tc.desc : Call after SetParameter + */ +HWTEST_F(CSCModuleTest, GetParameter_0302, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + ColorSpaceConverterParameter parameterGet; + int32_t ret = plugin->GetParameter(parameterGet); + ASSERT_EQ(VPE_ALGO_ERR_OK, ret); + ASSERT_EQ(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC, parameterGet.renderIntent); +} + +/** + * @tc.number : 0401 + * @tc.func : Process + * @tc.desc : Call after Create + */ +HWTEST_F(CSCModuleTest, Process_0401, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + 
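+    // SetParameter has not been called on this instance, so Process is expected to fail here.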
int32_t ret = plugin->Process(SurfaceBuffer::Create(), SurfaceBuffer::Create()); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0402 + * @tc.func : Process + * @tc.desc : Call after SetParameter, input is null + */ +HWTEST_F(CSCModuleTest, Process_0402, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + int32_t ret = plugin->Process(nullptr, SurfaceBuffer::Create()); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0403 + * @tc.func : Process + * @tc.desc : Call after SetParameter, output is null + */ +HWTEST_F(CSCModuleTest, Process_0403, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + int32_t ret = plugin->Process(SurfaceBuffer::Create(), nullptr); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0404 + * @tc.func : Process + * @tc.desc : Call after SetParameter, input and output is not null + */ +HWTEST_F(CSCModuleTest, Process_0404, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + auto input = PrepareOneFrame(); + ASSERT_NE(nullptr, input); + auto output = CreateSurfaceBuffer(OUTPUT_PIXEL_FORMAT, WIDTH, HEIGHT); + ASSERT_NE(nullptr, output); + SetMeatadata(output, OUTPUT_COLORSPACE_INFO); + + int32_t ret = plugin->Process(input, output); + ASSERT_EQ(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0501 + * @tc.func : ComposeImage + * @tc.desc : Call after Create + */ +HWTEST_F(CSCModuleTest, ComposeImage_0501, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + int32_t ret = plugin->ComposeImage( + SurfaceBuffer::Create(), SurfaceBuffer::Create(), SurfaceBuffer::Create(), false); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0502 + * @tc.func : ComposeImage + * @tc.desc : Call after SetParameter, inputSdrImage is null + */ +HWTEST_F(CSCModuleTest, ComposeImage_0502, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + int32_t ret = plugin->ComposeImage(nullptr, SurfaceBuffer::Create(), SurfaceBuffer::Create(), false); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0503 + * @tc.func : ComposeImage + * @tc.desc : Call after SetParameter, inputGainmap is null + */ +HWTEST_F(CSCModuleTest, ComposeImage_0503, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + int32_t ret = plugin->ComposeImage(SurfaceBuffer::Create(), nullptr, SurfaceBuffer::Create(), false); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0504 + * @tc.func : ComposeImage + * @tc.desc : Call after SetParameter, outputHdrImage is null + */ +HWTEST_F(CSCModuleTest, ComposeImage_0504, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + int32_t ret = plugin->ComposeImage(SurfaceBuffer::Create(), SurfaceBuffer::Create(), nullptr, false); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0505 + * @tc.func : ComposeImage + * @tc.desc : Call after SetParameter, inputSdrImage, inputGainmap and outputHdrImage is not null, legacy is false + */ +HWTEST_F(CSCModuleTest, ComposeImage_0505, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + auto inputSdrImage = 
CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + auto inputGainmap = CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + auto outputHdrImage = CreateSurfaceBuffer(OUTPUT_PIXEL_FORMAT, WIDTH, HEIGHT); + ASSERT_NE(nullptr, inputSdrImage); + ASSERT_NE(nullptr, inputGainmap); + ASSERT_NE(nullptr, outputHdrImage); + + int32_t ret = plugin->ComposeImage(inputSdrImage, inputGainmap, outputHdrImage, false); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0506 + * @tc.func : ComposeImage + * @tc.desc : Call after SetParameter, inputSdrImage, inputGainmap and outputHdrImage is not null, legacy is true + */ +HWTEST_F(CSCModuleTest, ComposeImage_0506, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + auto inputSdrImage = CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + auto inputGainmap = CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + auto outputHdrImage = CreateSurfaceBuffer(OUTPUT_PIXEL_FORMAT, WIDTH, HEIGHT); + ASSERT_NE(nullptr, inputSdrImage); + ASSERT_NE(nullptr, inputGainmap); + ASSERT_NE(nullptr, outputHdrImage); + + int32_t ret = plugin->ComposeImage(inputSdrImage, inputGainmap, outputHdrImage, true); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0601 + * @tc.func : DecomposeImage + * @tc.desc : Call after Create + */ +HWTEST_F(CSCModuleTest, DecomposeImage_0601, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + int32_t ret = plugin->DecomposeImage( + SurfaceBuffer::Create(), SurfaceBuffer::Create(), SurfaceBuffer::Create()); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0602 + * @tc.func : DecomposeImage + * @tc.desc : Call after SetParameter, inputImage is null + */ +HWTEST_F(CSCModuleTest, DecomposeImage_0602, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + auto inputImage = CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + ASSERT_NE(nullptr, inputImage); + + int32_t ret = plugin->DecomposeImage(inputImage, SurfaceBuffer::Create(), SurfaceBuffer::Create()); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0603 + * @tc.func : DecomposeImage + * @tc.desc : Call after SetParameter, outputSdrImage is null + */ +HWTEST_F(CSCModuleTest, DecomposeImage_0603, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + auto outputSdrImage = CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + ASSERT_NE(nullptr, outputSdrImage); + + int32_t ret = plugin->DecomposeImage(SurfaceBuffer::Create(), outputSdrImage, SurfaceBuffer::Create()); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0604 + * @tc.func : DecomposeImage + * @tc.desc : Call after SetParameter, outputGainmap is null + */ +HWTEST_F(CSCModuleTest, DecomposeImage_0604, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + auto outputGainmap = CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + ASSERT_NE(nullptr, outputGainmap); + + int32_t ret = plugin->DecomposeImage(SurfaceBuffer::Create(), SurfaceBuffer::Create(), outputGainmap); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0605 + * @tc.func : DecomposeImage + * @tc.desc : Call after SetParameter, inputImage, outputSdrImage and outputGainmap is not null + */ +HWTEST_F(CSCModuleTest, 
DecomposeImage_0605, TestSize.Level1) +{ + auto plugin = ColorSpaceConverter::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + auto inputImage = CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + auto outputSdrImage = CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT); + auto outputGainmap = CreateSurfaceBuffer(OUTPUT_PIXEL_FORMAT, WIDTH, HEIGHT); + ASSERT_NE(nullptr, inputImage); + ASSERT_NE(nullptr, outputSdrImage); + ASSERT_NE(nullptr, outputGainmap); + + int32_t ret = plugin->DecomposeImage(inputImage, outputSdrImage, outputGainmap); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/test/moduletest/colorspace_converter_video/BUILD.gn b/test/moduletest/colorspace_converter_video/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..2d8a2b1798fff2678ee2e8f63ff11d1d542c6bab --- /dev/null +++ b/test/moduletest/colorspace_converter_video/BUILD.gn @@ -0,0 +1,60 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/ohos.gni") +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_moduletest("colorspace_converter_video_module_test") { + module_out_path = MODULE_TEST_OUTPUT_PATH + + include_dirs = [ + ".", + "$INTERFACES_INNER_API_DIR", + "$DFX_DIR/include", + "//foundation/graphic/graphic_2d/interfaces/inner_api", + "$COLORSPACE_CONVERTER_VIDEO_DIR/include", + ] + + defines = [] + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + cflags_cc = cflags + cflags_cc += [ "-std=c++17" ] + + sources = [ + "api_test.cpp", + "func_test.cpp", + "state_test.cpp", + "demo_test.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine" + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "av_codec:av_codec_client", + "hilog:libhilog", + "hitrace:hitrace_meter", + "drivers_interface_display:display_commontype_idl_headers", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/moduletest/colorspace_converter_video/api_test.cpp b/test/moduletest/colorspace_converter_video/api_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7657125730769c4cb35fb97a1b6d6f1c970bdaa3 --- /dev/null +++ b/test/moduletest/colorspace_converter_video/api_test.cpp @@ -0,0 +1,496 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include "colorspace_converter_video.h" +#include "colorspace_converter_video_description.h" +#include "colorspace_converter_video_common.h" +#include "v1_0/cm_color_space.h" +#include "v1_0/hdr_static_metadata.h" +#include "algorithm_errors.h" +#include "media_description.h" +#include "algorithm_common.h" +#include "surface/window.h" +#include "external_window.h" +#include "colorspace_converter_video_impl.h" +#include "securec.h" + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; +constexpr uint32_t DEFAULT_WIDTH = 1920; +constexpr uint32_t DEFAULT_HEIGHT = 1080; +constexpr uint32_t DEFAULT_BYTE = 32; +using namespace testing::ext; +using namespace OHOS; +using namespace OHOS::Media; +using namespace std; +using namespace OHOS::Media::VideoProcessingEngine; +using namespace OHOS::HDI::Display::Graphic::Common::V1_0; +namespace { +class CscVCB : public ColorSpaceConverterVideoCallback { +public: + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) override; + void OnOutputFormatChanged(const Format& formatOutput) override; +}; +void CscVCB::OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) +{ + (void)index; + (void)flag; +} +void CscVCB::OnError(int32_t errorCode) +{ + (void)errorCode; +} +void CscVCB::OnState(int32_t state) +{ + (void)state; +} +void CscVCB::OnOutputFormatChanged(const Format& formatOutput) +{ + (void)formatOutput; +} + +constexpr CM_ColorSpaceInfo COLORSPACE_INFO_HDR_BT2020_HLG = { + COLORPRIMARIES_BT2020, + TRANSFUNC_HLG, + MATRIX_BT2020, + RANGE_LIMITED +}; + +class CSCVInnerApiTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp() + { + cscv_ = ColorSpaceConverterVideo::Create(); + cscvCb_ = std::make_shared(); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, COLORPRIMARIES_BT709); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, TRANSFUNC_BT709); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, MATRIX_BT709); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, RANGE_LIMITED); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, CM_METADATA_NONE); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, + int(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC)); + format.PutDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, 0.0); + + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, int(GRAPHIC_PIXEL_FMT_YCBCR_P010)); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, COLORPRIMARIES_BT2020); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, TRANSFUNC_HLG); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, MATRIX_BT2020); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, 
RANGE_LIMITED); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, CM_METADATA_NONE); + }; + void TearDown() + { + if (cscv_) + cscv_->Release(); + }; + sptr surface; + OHNativeWindow *nativeWindow; + BufferFlushConfig flushCfg_{}; + BufferRequestConfig requestCfg_{}; + uint32_t FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer); + GSError SetMeatadata(sptr &buffer, uint32_t value); + GSError SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo); + void InitBufferConfig(); + std::shared_ptr cscv_ = nullptr; + std::shared_ptr cscvCb_ = nullptr; + OHOS::Media::Format format; + OHOS::Media::Format format2; +}; + +int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = reinterpret_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + + return nanoTime / NANOS_IN_MICRO; +} + +GSError CSCVInnerApiTest::SetMeatadata(sptr &buffer, uint32_t value) +{ + std::vector metadata; + metadata.resize(sizeof(value)); + (void)memcpy_s(metadata.data(), metadata.size(), &value, sizeof(value)); + GSError err = buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, metadata); + return err; +} + +GSError CSCVInnerApiTest::SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo) +{ + std::vector metadata; + metadata.resize(sizeof(CM_ColorSpaceInfo)); + (void)memcpy_s(metadata.data(), metadata.size(), &colorspaceInfo, sizeof(CM_ColorSpaceInfo)); + GSError err = buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, metadata); + return err; +} + +void CSCVInnerApiTest::InitBufferConfig() +{ + requestCfg_.usage = + BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER | BUFFER_USAGE_HW_TEXTURE; + requestCfg_.width = DEFAULT_WIDTH; + requestCfg_.height = DEFAULT_HEIGHT; + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = DEFAULT_BYTE; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = DEFAULT_WIDTH; + flushCfg_.damage.h = DEFAULT_HEIGHT; + flushCfg_.timestamp = 0; +} + +uint32_t CSCVInnerApiTest::FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer) +{ + struct Region region; + struct Region::Rect *rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = DEFAULT_WIDTH; + rect->h = DEFAULT_HEIGHT; + region.rects = rect; + NativeWindowHandleOpt(nativeWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + int32_t err = OH_NativeWindow_NativeWindowFlushBuffer(nativeWindow, ohNativeWindowBuffer, -1, region); + delete rect; + if (err != 0) { + cout << "FlushBuffer failed" << endl; + return 1; + } + return 0; +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0010 + * @tc.name : release output buffer api with illegal parameter + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0010, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr cscv2; + sptr surface2; + ret = cscv_->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + surface = cscv_->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + cscv2 = ColorSpaceConverterVideo::Create(); + surface2 = cscv2->CreateInputSurface(); + ret = cscv_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->ReleaseOutputBuffer(100000, true); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.number : CSCV_API_0020 + * @tc.name : SetParameter test + * @tc.desc : function test + */ 
+HWTEST_F(CSCVInnerApiTest, CSCV_API_0020, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr cscv2; + sptr surface2; + ret = cscv_->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + surface = cscv_->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + cscv2 = ColorSpaceConverterVideo::Create(); + surface2 = cscv2->CreateInputSurface(); + ret = cscv_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + + ret = cscv_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->SetParameter(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OHOS::Media::Format format2; + ret = cscv_->SetParameter(format2); + ASSERT_NE(ret, VPE_ALGO_ERR_OK); +} +/** + * @tc.number : CSCV_API_0030 + * @tc.name : NotifyEOS test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0030, TestSize.Level2) +{ + int32_t ret = 0; + OHNativeWindowBuffer *ohNativeWindowBuffer; + ret = cscv_->SetCallback(cscvCb_); + surface = cscv_->CreateInputSurface(); + std::shared_ptr cscv2 = ColorSpaceConverterVideo::Create(); + sptr surface2 = cscv2->CreateInputSurface(); + ret = cscv_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Start(); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0040 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0040, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr cscvImpl; + cscvImpl = make_shared(); + ret = cscv_->SetCallback(cscvCb_); + sptr surface = cscv_->CreateInputSurface(); + ret = cscvImpl->Init(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscvImpl->SetOutputSurface(surface); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscvImpl->OnProducerBufferReleased(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0050 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0050, TestSize.Level2) +{ + int32_t ret = 0; + OHNativeWindowBuffer *ohNativeWindowBuffer; + ret = cscv_->SetCallback(cscvCb_); + surface = cscv_->CreateInputSurface(); + std::shared_ptr cscv2 = ColorSpaceConverterVideo::Create(); + sptr surface2 = cscv2->CreateInputSurface(); + ret = cscv_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Start(); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + 
ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->ReleaseOutputBuffer(0, true); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0060 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0060, TestSize.Level2) +{ + int32_t ret = 0; + ret = cscv_->SetCallback(cscvCb_); + sptr surface1 = cscv_->CreateInputSurface(); + std::shared_ptr cscv2; + cscv2 = ColorSpaceConverterVideo::Create(); + sptr surface2 = cscv2->CreateInputSurface(); + ret = cscv_->SetOutputSurface(surface1); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->SetOutputSurface(surface1); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->ReleaseOutputBuffer(0, 0); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0061 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0061, TestSize.Level2) +{ + int32_t ret = 0; + std::queue> AppInBufferAvilQue; + sptr buffer; + sptr buffer2; + int32_t fence = -1; + GraphicPixelFormat surfacePixelFmt; + format.GetIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, *(int *)&surfacePixelFmt); + requestCfg_.format = surfacePixelFmt; + InitBufferConfig(); + sptr surface1 = cscv_->CreateInputSurface(); + std::shared_ptr cscv2; + cscv2 = ColorSpaceConverterVideo::Create(); + sptr surface2 = cscv2->CreateInputSurface(); + ret = cscv_->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + GSError err = surface1->RequestBuffer(buffer, fence, requestCfg_); + ASSERT_EQ(err, GSERROR_OK); + AppInBufferAvilQue.push(buffer); + err = surface1->FlushBuffer(buffer, -1, flushCfg_); + ASSERT_EQ(err, GSERROR_OK); + ret = cscv_->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + cscv2->Release(); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0062 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0062, TestSize.Level2) +{ + int32_t ret = 0; + std::queue> AppInBufferAvilQue; + sptr buffer; + sptr buffer2; + int32_t fence = -1; + GraphicPixelFormat surfacePixelFmt; + format.GetIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, *(int *)&surfacePixelFmt); + requestCfg_.format = surfacePixelFmt; + InitBufferConfig(); + sptr surface1 = cscv_->CreateInputSurface(); + std::shared_ptr cscv2; + cscv2 = ColorSpaceConverterVideo::Create(); + sptr surface2 = cscv2->CreateInputSurface(); + std::shared_ptr cscv3; + cscv3 = ColorSpaceConverterVideo::Create(); + sptr surface3 = 
cscv3->CreateInputSurface(); + ret = cscv_->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + GSError err = surface1->RequestBuffer(buffer, fence, requestCfg_); + ASSERT_EQ(err, GSERROR_OK); + AppInBufferAvilQue.push(buffer); + err = surface1->FlushBuffer(buffer, -1, flushCfg_); + ASSERT_EQ(err, GSERROR_OK); + ret = cscv_->SetOutputSurface(surface3); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + cscv2->Release(); + cscv3->Release(); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0063 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0063, TestSize.Level2) +{ + int32_t ret = 0; + GSError err = GSERROR_OK; + std::queue> AppInBufferAvilQue; + sptr buffer; + sptr buffer2; + int32_t fence = -1; + CM_ColorSpaceInfo inColspcInfo = COLORSPACE_INFO_HDR_BT2020_HLG; + CM_HDR_Metadata_Type inMetaType = CM_METADATA_NONE; + GraphicPixelFormat surfacePixelFmt; + format2.GetIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, *(int *)&surfacePixelFmt); + requestCfg_.format = surfacePixelFmt; + InitBufferConfig(); + sptr surface1 = cscv_->CreateInputSurface(); + std::shared_ptr cscv2; + cscv2 = ColorSpaceConverterVideo::Create(); + sptr surface2 = cscv2->CreateInputSurface(); + std::shared_ptr cscv3; + cscv3 = ColorSpaceConverterVideo::Create(); + sptr surface3 = cscv3->CreateInputSurface(); + cscv_->SetCallback(cscvCb_); + cscv_->SetOutputSurface(surface2); + cscv_->Configure(format); + cscv_->Prepare(); + ret = cscv_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + for (int i = 0; i < 3; i++) { + sptr surfaceTmp = (i % 2) ? surface2 : surface3; + err = surface1->RequestBuffer(buffer, fence, requestCfg_); + ASSERT_EQ(err, GSERROR_OK); + err = SetMeatadata(buffer, inColspcInfo); + ASSERT_EQ(err, GSERROR_OK); + err = SetMeatadata(buffer, (uint32_t)inMetaType); + ASSERT_EQ(err, GSERROR_OK); + AppInBufferAvilQue.push(buffer); + err = surface1->FlushBuffer(buffer, -1, flushCfg_); + ASSERT_EQ(err, GSERROR_OK); + ret = cscv_->SetOutputSurface(surfaceTmp); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv_->NotifyEos(); + sleep(2); + } + cscv2->Release(); + cscv3->Release(); +} +} \ No newline at end of file diff --git a/test/moduletest/colorspace_converter_video/demo_test.cpp b/test/moduletest/colorspace_converter_video/demo_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..1c85bad55db8921a597795967cb5c7912899b5a0 --- /dev/null +++ b/test/moduletest/colorspace_converter_video/demo_test.cpp @@ -0,0 +1,216 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include "colorspace_converter_video_description.h" +#include "v1_0/cm_color_space.h" +#include "v1_0/hdr_static_metadata.h" +#include "algorithm_errors.h" +#include "media_description.h" +#include "algorithm_common.h" +#include "surface/window.h" +#include "external_window.h" +#include "colorspace_converter_video_impl.h" +#include "colorspace_converter_video.h" +#include "colorspace_converter_video_description.h" +#include "colorspace_converter_video_common.h" +#include "vpe_log.h" + +constexpr uint32_t DEFAULT_WIDTH = 3840; // 1920; +constexpr uint32_t DEFAULT_HEIGHT = 2160; // 1080; +using namespace testing::ext; +using namespace OHOS; +using namespace OHOS::Media; +using namespace std; +using namespace OHOS::Media::VideoProcessingEngine; +using namespace OHOS::HDI::Display::Graphic::Common::V1_0; +namespace { + +std::shared_ptr cscv_ = nullptr; +sptr outSurface_; +sptr surface2; +BufferRequestConfig requestCfg_{}; +sptr outputBuffer = nullptr; +class CscVCB : public ColorSpaceConverterVideoCallback { +public: + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) override; + void OnOutputFormatChanged(const Format& format) override; +}; + +void CscVCB::OnOutputFormatChanged(const Format& format) +{ + (void)format; +} +void CscVCB::OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) +{ + (void)flag; + cscv_->ReleaseOutputBuffer(index, true); +} + +void CscVCB::OnError(int32_t errorCode) +{ + (void)errorCode; +} + +void CscVCB::OnState(int32_t state) +{ + (void)state; +} + +constexpr CM_ColorSpaceInfo COLORSPACE_INFO_HDR_BT2020 = { + COLORPRIMARIES_BT2020, // 2 + TRANSFUNC_HLG, // 1 + MATRIX_BT2020, // 2 + RANGE_LIMITED // 2 +}; + + +class CscvDemoTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp() + { + cscv_ = ColorSpaceConverterVideo::Create(); + cscvCb_ = std::make_shared(); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, COLORPRIMARIES_BT709); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, TRANSFUNC_BT709); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, MATRIX_BT709); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, RANGE_LIMITED); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, CM_METADATA_NONE); + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, + int(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC)); + format_.PutDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, 0.0); + }; + void TearDown() + { + if (cscv_) + cscv_->Release(); + }; + sptr surface; + OHNativeWindow *nativeWindow; + GSError SetMeatadata(sptr &buffer, uint32_t value); + GSError SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo); + void OutputBufferAvailable(uint32_t index, CscvBufferFlag flag); + GSError OnProducerBufferReleased(); + std::shared_ptr cscvCb_ = nullptr; + OHOS::Media::Format format_; +}; + + +class ImageProcessBufferConsumerListener : public OHOS::IBufferConsumerListener { +public: + void OnBufferAvailable() override; +}; + +void ImageProcessBufferConsumerListener::OnBufferAvailable() +{ + sptr fence{nullptr}; + int64_t timestamp = 0; + OHOS::Rect damage; + outSurface_->AcquireBuffer(outputBuffer, fence, timestamp, 
damage); +} + +GSError CscvDemoTest::SetMeatadata(sptr &buffer, uint32_t value) +{ + std::vector metadata; + metadata.resize(sizeof(value)); + (void)memcpy_s(metadata.data(), metadata.size(), &value, sizeof(value)); + GSError err = buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, metadata); + return err; +} + +GSError CscvDemoTest::SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo) +{ + std::vector metadata; + metadata.resize(sizeof(CM_ColorSpaceInfo)); + (void)memcpy_s(metadata.data(), metadata.size(), &colorspaceInfo, sizeof(CM_ColorSpaceInfo)); + GSError err = buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, metadata); + return err; +} + +void CscvDemoTest::OutputBufferAvailable(uint32_t index, CscvBufferFlag flag) +{ + (void)flag; + cscv_->ReleaseOutputBuffer(index, true); +} + +GSError CscvDemoTest::OnProducerBufferReleased() +{ + sptr buffertmp; + sptr fencetmp{nullptr}; + surface2->RequestBuffer(buffertmp, fencetmp, requestCfg_); + return GSERROR_OK; +} + +HWTEST_F(CscvDemoTest, DEMO_RUN_TEST, TestSize.Level1) +{ + BufferFlushConfig flushCfg_{}; + sptr buffer; + // input SDR + CM_ColorSpaceInfo inColspcInfo = COLORSPACE_INFO_HDR_BT2020; + CM_HDR_Metadata_Type inMetaType = CM_VIDEO_HDR_VIVID; + GraphicPixelFormat surfacePixelFmt = GRAPHIC_PIXEL_FMT_YCBCR_P010; + requestCfg_.format = surfacePixelFmt; + requestCfg_.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE + | BUFFER_USAGE_HW_RENDER | BUFFER_USAGE_HW_TEXTURE; + requestCfg_.width = DEFAULT_WIDTH; + requestCfg_.height = DEFAULT_HEIGHT; + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = 16; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = DEFAULT_WIDTH; + flushCfg_.damage.h = DEFAULT_HEIGHT; + flushCfg_.timestamp = 0; + sptr surface1 = cscv_->CreateInputSurface(); + outSurface_ = Surface::CreateSurfaceAsConsumer("ConvertInputSurface2"); + sptr listener = new ImageProcessBufferConsumerListener(); + outSurface_->RegisterConsumerListener(listener); + sptr producer = outSurface_->GetProducer(); + surface2 = Surface::CreateSurfaceAsProducer(producer); + outSurface_->SetQueueSize(5); + surface2->RegisterReleaseListener([this](sptr &buffer) { + (void)buffer; + return OnProducerBufferReleased(); + }); + ASSERT_EQ(cscv_->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->SetOutputSurface(surface2), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Configure(format_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Start(), VPE_ALGO_ERR_OK); + int frames = 5; // 5 + while (frames) { + sptr fence{nullptr}; + sptr buffer2; + surface1->RequestBuffer(buffer2, fence, requestCfg_); + SetMeatadata(buffer2, inColspcInfo); + SetMeatadata(buffer2, (uint32_t)inMetaType); + surface1->FlushBuffer(buffer2, -1, flushCfg_); + frames--; + outSurface_->ReleaseBuffer(outputBuffer, -1); + usleep(1 * 100 * 1000); + } + ASSERT_EQ(cscv_->NotifyEos(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Release(), VPE_ALGO_ERR_OK); +} + +} \ No newline at end of file diff --git a/test/moduletest/colorspace_converter_video/func_test.cpp b/test/moduletest/colorspace_converter_video/func_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..601fcf62db3c927a35a54b72f062439418f7ed86 --- /dev/null +++ b/test/moduletest/colorspace_converter_video/func_test.cpp @@ -0,0 +1,399 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include "colorspace_converter_video.h" +#include "colorspace_converter_video_description.h" +#include "v1_0/cm_color_space.h" +#include "v1_0/hdr_static_metadata.h" +#include "algorithm_common.h" +#include "algorithm_errors.h" +using namespace testing::ext; +using namespace OHOS; +using namespace OHOS::Media; +using namespace OHOS::Media::VideoProcessingEngine; +using namespace OHOS::HDI::Display::Graphic::Common::V1_0; +using namespace std; +namespace { +constexpr int32_t NONEXIST_VAL = 1000; +class CscVCB : public ColorSpaceConverterVideoCallback { +public: + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) override; + void OnOutputFormatChanged(const Format& format) override; +}; +void CscVCB::OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) +{ + (void)index; + (void)flag; +} +void CscVCB::OnError(int32_t errorCode) +{ + (void)errorCode; +} +void CscVCB::OnOutputFormatChanged(const Format& format) +{ + (void)format; +} +void CscVCB::OnState(int32_t state) +{ + (void)state; +} + +class CSCVInnerFuncTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp() + { + cscv_ = ColorSpaceConverterVideo::Create(); + cscvCb_ = std::make_shared(); + cscv_->SetCallback(cscvCb_); + surface = cscv_->CreateInputSurface(); + outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + cscv_->SetOutputSurface(producerSurface); + }; + void TearDown() + { + cscv_->Stop(); + cscv_->Release(); + }; + void SetMeta(int32_t csPrimaries, int32_t csTransFunc, int32_t csMatrix, int32_t csRange, int32_t metaType); + void SetMetaPixFmt(int32_t pixFmt); + void SetMetaExtra(); + void SetRenderIntent(); + std::shared_ptr cscv_ = nullptr; + std::shared_ptr cscvCb_ = nullptr; + OHOS::Media::Format format_; + sptr surface; + sptr outSurface; +}; + +void CSCVInnerFuncTest::SetMeta(int32_t csPrimaries, int32_t csTransFunc, + int32_t csMatrix, int32_t csRange, int32_t metaType) +{ + if (csPrimaries >= 0) + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, csPrimaries); + if (csTransFunc >= 0) + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, csTransFunc); + if (csMatrix >= 0) + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, csMatrix); + if (csRange >= 0) + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, csRange); + if (metaType >= 0) + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, metaType); +} + +void CSCVInnerFuncTest::SetMetaPixFmt(int32_t pixFmt) +{ + format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, pixFmt); +} + +void CSCVInnerFuncTest::SetRenderIntent() +{ + 
format_.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, + int(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC)); +} + +void CSCVInnerFuncTest::SetMetaExtra() +{ + format_.PutDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, 0.0); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0010 + * @tc.name : call Configure with non COLORPRIMARIES_BT709 value + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0010, TestSize.Level2) +{ + SetMeta(COLORPRIMARIES_BT601_P, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Prepare()); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Start()); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0020 + * @tc.name : call Configure with non TRANSFUNC_BT709 value + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0020, TestSize.Level2) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_HLG, MATRIX_BT709, RANGE_LIMITED, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Prepare()); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Start()); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0030 + * @tc.name : call Configure with non MATRIX_BT709 value + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0030, TestSize.Level2) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT601_N, RANGE_LIMITED, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Prepare()); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Start()); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0040 + * @tc.name : call Configure with non RANGE_LIMITED value + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0040, TestSize.Level2) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, RANGE_FULL, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Prepare()); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Start()); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0050 + * @tc.name : call Configure with non CM_METADATA_NONE value + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0050, TestSize.Level2) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, CM_VIDEO_HDR_VIVID); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Prepare()); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Start()); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0060 + * @tc.name : call Configure with non NV12 pix_fmt value + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0060, TestSize.Level2) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, CM_METADATA_NONE); + SetMetaPixFmt(GRAPHIC_PIXEL_FMT_BGRA_8888); + SetRenderIntent(); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Prepare()); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Start()); +} + +/** + * @tc.number : 
CSCVInnerFuncTest_CSCV_FUNC_0070 + * @tc.name : call Configure with nonexist value + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0070, TestSize.Level2) +{ + SetMeta(NONEXIST_VAL, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Prepare()); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0080 + * @tc.name : Configure with BT709 value and check getParameter return value + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0080, TestSize.Level1) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Prepare()); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Start()); + OHOS::Media::Format ret_meta; + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->GetParameter(ret_meta)); + int32_t primaries = 0; + ASSERT_EQ(true, ret_meta.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, primaries)); + ASSERT_EQ(COLORPRIMARIES_BT709, primaries); + int32_t transFunc = 0; + ASSERT_EQ(true, ret_meta.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, transFunc)); + ASSERT_EQ(TRANSFUNC_BT709, transFunc); + int32_t matrix = 0; + ASSERT_EQ(true, ret_meta.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, matrix)); + ASSERT_EQ(MATRIX_BT709, matrix); + int32_t range = 0; + ASSERT_EQ(true, ret_meta.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, range)); + ASSERT_EQ(RANGE_LIMITED, range); + int32_t metaVal = 0; + ASSERT_EQ(true, ret_meta.GetIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, metaVal)); + ASSERT_EQ(CM_METADATA_NONE, metaVal); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0090 + * @tc.name : Configure with BT709 value, skip metatype, and check getParameter return value + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0090, TestSize.Level1) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, -1); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Prepare()); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->Start()); + OHOS::Media::Format retMeta; + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv_->GetParameter(retMeta)); + int32_t primaries = 0; + ASSERT_EQ(true, retMeta.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, primaries)); + ASSERT_EQ(COLORPRIMARIES_BT709, primaries); + int32_t transFunc = 0; + ASSERT_EQ(true, retMeta.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, transFunc)); + ASSERT_EQ(TRANSFUNC_BT709, transFunc); + int32_t matrix = 0; + ASSERT_EQ(true, retMeta.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, matrix)); + ASSERT_EQ(MATRIX_BT709, matrix); + int32_t range = 0; + ASSERT_EQ(true, retMeta.GetIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, range)); + ASSERT_EQ(RANGE_LIMITED, range); + int32_t metaVal = 0; + ASSERT_EQ(false, retMeta.GetIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, metaVal)); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0100 + * @tc.name : Configure with BT709 value ,skip colorspace + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0100, TestSize.Level1) +{ + 
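+    // Colour primaries are deliberately left unset (-1 is skipped by SetMeta), so Configure() is expected to fail.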
SetMeta(-1, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_NE(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0110 + * @tc.name : Configure with BT709 value ,skip all colorspace + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0110, TestSize.Level1) +{ + SetMeta(-1, -1, -1, -1, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_NE(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0120 + * @tc.name : Configure with BT709 value ,skip pixel format + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0120, TestSize.Level1) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, CM_METADATA_NONE); + SetRenderIntent(); + ASSERT_NE(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0130 + * @tc.name : Configure with BT709 value ,skip transfunc/matrix/range + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0130, TestSize.Level1) +{ + SetMeta(COLORPRIMARIES_BT709, -1, -1, -1, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_NE(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0140 + * @tc.name : Configure with BT709 value ,skip matrix/range + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0140, TestSize.Level1) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, -1, -1, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_NE(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0150 + * @tc.name : Meta test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0150, TestSize.Level1) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, -1, -1, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + ASSERT_NE(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); +} + +/** + * @tc.number : CSCVInnerFuncTest_CSCV_FUNC_0160 + * @tc.name : Meta test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0160, TestSize.Level1) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, -1, CM_METADATA_NONE); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + SetRenderIntent(); + ASSERT_NE(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); +} + +/** + * @tc.number : CSCV_FUNC_0170 + * @tc.name : Meta test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0170, TestSize.Level1) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, CM_METADATA_NONE); + SetMetaExtra(); + ASSERT_NE(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); +} + +/** + * @tc.number : CSCV_FUNC_0180 + * @tc.name : Meta test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerFuncTest, CSCV_FUNC_0180, TestSize.Level1) +{ + SetMeta(COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED, CM_METADATA_NONE); + SetMetaExtra(); + SetMetaPixFmt(int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + ASSERT_NE(VPE_ALGO_ERR_OK, cscv_->Configure(format_)); +} +} \ No newline at end of file diff --git a/test/moduletest/colorspace_converter_video/state_test.cpp b/test/moduletest/colorspace_converter_video/state_test.cpp new file mode 100644 index 
0000000000000000000000000000000000000000..2ea5f97eebd8ffa7f31005a896793932644065f8 --- /dev/null +++ b/test/moduletest/colorspace_converter_video/state_test.cpp @@ -0,0 +1,766 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include +#include +#include +#include + +#include "colorspace_converter_video.h" +#include "media_description.h" +#include "colorspace_converter_video_description.h" +#include "colorspace_converter_video_common.h" +#include "v1_0/cm_color_space.h" +#include "v1_0/hdr_static_metadata.h" +#include "algorithm_common.h" +#include "algorithm_errors.h" + +using namespace testing::ext; +using namespace OHOS; +using namespace OHOS::Media; +using namespace Media::VideoProcessingEngine; +using namespace OHOS::HDI::Display::Graphic::Common::V1_0; +using namespace std; +namespace { +OHOS::Media::Format format; +const std::string DEFAULT_FILE = "/data/test/media/1080p_Vivid.mp4"; + +enum CSCV_API_NAME { + SETCALLBACK = 1, + CREATEINPUTSURFACE, + SETOUTPUTSURFACE, + CONFIGURE, +}; + +uint32_t g_errorCount = 0; +class CscVCB : public ColorSpaceConverterVideoCallback { +public: + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) override; + void OnOutputFormatChanged(const Format& formatOutput) override; +}; +void CscVCB::OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) +{ + (void)index; + (void)flag; +} +void CscVCB::OnError(int32_t errorCode) +{ + (void)errorCode; + g_errorCount++; +} +void CscVCB::OnOutputFormatChanged(const Format& formatOutput) +{ + (void)formatOutput; +} +void CscVCB::OnState(int32_t state) +{ + (void)state; +} + +class CSCVInnerStateTest : public testing::Test { +public: + static void SetUpTestCase(void) + { + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, COLORPRIMARIES_BT709); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, TRANSFUNC_BT709); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, MATRIX_BT709); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, RANGE_LIMITED); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, CM_METADATA_NONE); + format.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, + int(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC)); + }; + static void TearDownTestCase(void) {}; + void SetUp() + { + cscv = ColorSpaceConverterVideo::Create(); + cscvCb_ = std::make_shared(); + g_errorCount = 0; + }; + void TearDown() + { + if (cscv) { + cscv->Release(); + cscv = nullptr; + } + }; + + void PrepareFunc(int num); + void AllPrepareFunc(); + + std::shared_ptr cscvCb_ = nullptr; + std::shared_ptr cscv = nullptr; +}; + +void CSCVInnerStateTest::PrepareFunc(int num) +{ + int32_t ret = 0; + std::shared_ptr cscv2; + sptr surface2; 
+ sptr surface; + switch (num) { + case SETCALLBACK: + ret = cscv->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + break; + case CREATEINPUTSURFACE: + surface = cscv->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + break; + case SETOUTPUTSURFACE: + cscv2 = ColorSpaceConverterVideo::Create(); + surface2 = cscv2->CreateInputSurface(); + ret = cscv->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + break; + case CONFIGURE: + ret = cscv->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + break; + default: + break; + } +} + +void CSCVInnerStateTest::AllPrepareFunc() +{ + int32_t ret = 0; + std::shared_ptr cscv2; + sptr surface2; + sptr surface; + ret = cscv->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + surface = cscv->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + cscv2 = ColorSpaceConverterVideo::Create(); + surface2 = cscv2->CreateInputSurface(); + ret = cscv->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0010 + * @tc.name : call all combination of prepare-state func + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0010, TestSize.Level1) +{ + std::vector nums = {1, 2, 3, 4}; + int32_t ret = 0; + do { + ret = cscv->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + cscv = nullptr; + cscv = ColorSpaceConverterVideo::Create(); + ASSERT_NE(cscv, nullptr); + for (int num:nums) + PrepareFunc(num); + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + } while (std::next_permutation(nums.begin(), nums.end())); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0020 + * @tc.name : call start func without fully prepare(configure) + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0020, TestSize.Level2) +{ + int32_t ret = 0; + sptr surface = cscv->CreateInputSurface(); + std::shared_ptr cscv2 = ColorSpaceConverterVideo::Create(); + ASSERT_NE(cscv2, nullptr); + sptr surface2 = cscv2->CreateInputSurface(); + ret = cscv->SetOutputSurface(surface2); + ASSERT_NE(surface, nullptr); + ret = cscv->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_OPERATION); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0030 + * @tc.name : call start func without fully prepare(SetCallback) + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0030, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr cscv2 = ColorSpaceConverterVideo::Create(); + ASSERT_NE(cscv2, nullptr); + sptr surface2 = cscv2->CreateInputSurface(); + ret = cscv->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + sptr surface = cscv->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + ret = cscv->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_OPERATION); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0040 + * @tc.name : call start func without fully prepare(SetOutputSurface) + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0040, TestSize.Level2) +{ + int32_t ret = 0; + sptr surface = cscv->CreateInputSurface(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->SetCallback(cscvCb_); + ASSERT_EQ(ret, 
VPE_ALGO_ERR_OK); + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_OPERATION); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0050 + * @tc.name : call start func without fully prepare(CreateInputSurface) + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0050, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr cscv2 = ColorSpaceConverterVideo::Create(); + sptr surface2 = cscv2->CreateInputSurface(); + ret = cscv->SetOutputSurface(surface2); + ret = cscv->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_OPERATION); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0060 + * @tc.name : call prepare/start/stop/notifyeos without initialize + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0060, TestSize.Level2) +{ + int32_t ret = 0; + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->Stop(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0070 + * @tc.name : repeat call CreateInputSurface + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0070, TestSize.Level1) +{ + ASSERT_NE(cscv->CreateInputSurface(), nullptr); + ASSERT_EQ(cscv->CreateInputSurface(), nullptr); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0080 + * @tc.name : repeat call SetOutputSurface + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0080, TestSize.Level1) +{ + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0090 + * @tc.name : repeat call SetCallback + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0090, TestSize.Level1) +{ + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0100 + * @tc.name : repeat call Configure with different value + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0100, TestSize.Level1) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(VPE_ALGO_ERR_OK, cscv->SetOutputSurface(producerSurface)); + + OHOS::Media::Format format2, format3; + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, COLORPRIMARIES_BT601_P); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, TRANSFUNC_BT709); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, MATRIX_BT709); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, RANGE_LIMITED); + format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, CM_METADATA_NONE); + 
format2.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, + int(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC)); + format3.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, int(GRAPHIC_PIXEL_FMT_YCBCR_420_SP)); + format3.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, COLORPRIMARIES_BT709); + format3.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, TRANSFUNC_HLG); + format3.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, MATRIX_BT709); + format3.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, RANGE_LIMITED); + format3.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, CM_METADATA_NONE); + format3.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, + int(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC)); + + ASSERT_EQ(cscv->Configure(format2), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format3), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0110 + * @tc.name : call configure -> start -> stop -> eos + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0110, TestSize.Level2) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = cscv->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->Stop(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0120 + * @tc.name : call configure -> reset -> configure -> prepare -> start ->eos ->release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0120, TestSize.Level1) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = cscv->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0130 + * @tc.name : call configure -> reset -> reset ->release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0130, TestSize.Level1) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = cscv->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0140 + * @tc.name : call configure -> reset ->release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0140, TestSize.Level1) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = cscv->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0150 + * @tc.name : call configure -> reset -> CreateInputSurface -> + SetOutputSurface -> SetCallback -> configure -> prepare ->start -> eos ->release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0150, TestSize.Level1) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = cscv->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + auto surface = cscv->CreateInputSurface(); + ASSERT_EQ(surface, nullptr); + std::shared_ptr cscv2 = ColorSpaceConverterVideo::Create(); + sptr surface2 = cscv2->CreateInputSurface(); + ret = cscv->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = 
cscv->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Configure(format); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ASSERT_EQ(g_errorCount, 0); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0160 + * @tc.name : call configure -> reset -> prepare -> start -> stop -> eos + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0160, TestSize.Level2) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = cscv->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->Stop(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0170 + * @tc.name : call configure -> release ->(prepare-reset) + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0170, TestSize.Level2) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = cscv->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscv->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->Stop(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = cscv->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0180 + * @tc.name : repeat call start/stop in processing + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0180, TestSize.Level1) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Stop(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Stop(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0190 + * @tc.name : configure -> prepare -> start -> stop -> release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0190, TestSize.Level1) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Release(), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0200 + * @tc.name : 
configure -> prepare -> start -> stop -> reset -> release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0200, TestSize.Level1) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Reset(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Release(), VPE_ALGO_ERR_OK); + cscv = nullptr; +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0210 + * @tc.name : configure -> prepare -> start -> stop -> (error)configure/prepare/stop/eos + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0210, TestSize.Level2) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Stop(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->NotifyEos(), VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0220 + * @tc.name : configure -> prepare -> start -> eos -> release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0220, TestSize.Level1) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->NotifyEos(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Release(), VPE_ALGO_ERR_OK); + cscv = nullptr; +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0230 + * @tc.name : configure -> prepare -> start -> eos -> reset -> release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0230, TestSize.Level1) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), 
VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->NotifyEos(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Reset(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Release(), VPE_ALGO_ERR_OK); + cscv = nullptr; +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0240 + * @tc.name : configure -> prepare -> start -> eos -> (error)configure/prepare/start/stop/eos + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0240, TestSize.Level2) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->NotifyEos(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->NotifyEos(), VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0250 + * @tc.name : configure -> prepare -> start -> release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0250, TestSize.Level1) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Release(), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0260 + * @tc.name : configure -> prepare -> start -> reset ->release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0260, TestSize.Level1) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->Reset(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Release(), VPE_ALGO_ERR_OK); + cscv = nullptr; +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0270 + * @tc.name : configure -> prepare -> start -> configure + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0270, TestSize.Level1) +{ + sptr surface = cscv->CreateInputSurface(); 
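+    // Once started, only the output surface can still be replaced; SetCallback() and Configure() must return VPE_ALGO_ERR_INVALID_STATE.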
+ sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0280 + * @tc.name : configure -> prepare -> (error)configure/stop/eos/reset/release + * @tc.desc : state test + */ +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0280, TestSize.Level2) +{ + sptr surface = cscv->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Start(), VPE_ALGO_ERR_OK); + + ASSERT_EQ(cscv->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->SetCallback(cscvCb_), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->Configure(format), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->NotifyEos(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(cscv->Reset(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv->Release(), VPE_ALGO_ERR_OK); + cscv = nullptr; +} + +} \ No newline at end of file diff --git a/test/moduletest/metadata_generator/BUILD.gn b/test/moduletest/metadata_generator/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..4bd4a595e2fcc46d9f5632a9672ade4e3684534c --- /dev/null +++ b/test/moduletest/metadata_generator/BUILD.gn @@ -0,0 +1,51 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
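+# Module test target for the MetadataGenerator plugin; mg_module_test.cpp covers Create, SetParameter, GetParameter and Process.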
+ +import("//build/ohos.gni") +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_moduletest("metadata_generator_module_test") { + module_out_path = MODULE_TEST_OUTPUT_PATH + + include_dirs = [ + ".", + "$INTERFACES_INNER_API_DIR", + ] + + defines = [] + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + cflags_cc = cflags + cflags_cc += [ "-std=c++17" ] + + sources = [ + "mg_module_test.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine" + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "drivers_interface_display:display_commontype_idl_headers", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/moduletest/metadata_generator/mg_module_test.cpp b/test/moduletest/metadata_generator/mg_module_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a6d7c416b7119d600781cf280427cff62070e78f --- /dev/null +++ b/test/moduletest/metadata_generator/mg_module_test.cpp @@ -0,0 +1,194 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include "refbase.h" +#include "surface.h" +#include "metadata_generator.h" +#include "algorithm_errors.h" + +using namespace std; +using namespace testing::ext; + +namespace { +constexpr int32_t WIDTH = 1920; +constexpr int32_t HEIGHT = 1080; +} + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class MGModuleTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp() {}; + void TearDown() {}; + + void SetParameter(std::shared_ptr plugin); + sptr CreateSurfaceBuffer(); +}; + +void MGModuleTest::SetParameter(std::shared_ptr plugin) +{ + MetadataGeneratorParameter parameterSet; + int32_t ret = plugin->SetParameter(parameterSet); + ASSERT_EQ(VPE_ALGO_ERR_OK, ret); +} + +sptr MGModuleTest::CreateSurfaceBuffer() +{ + auto buffer = SurfaceBuffer::Create(); + if (nullptr == buffer) { + return nullptr; + } + BufferRequestConfig inputCfg; + inputCfg.width = WIDTH; + inputCfg.height = HEIGHT; + inputCfg.strideAlignment = WIDTH; + inputCfg.usage = BUFFER_USAGE_HW_TEXTURE; + inputCfg.format = GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + inputCfg.timeout = 0; + GSError err = buffer->Alloc(inputCfg); + if (GSERROR_OK != err) { + return nullptr; + } + + return buffer; +} + +/** + * @tc.number : 0101 + * @tc.func : Create + * @tc.desc : Test for MetadataGenerator Create + */ +HWTEST_F(MGModuleTest, Create_0101, TestSize.Level1) +{ + auto plugin = MetadataGenerator::Create(); + ASSERT_NE(nullptr, plugin); +} + +/** + * @tc.number : 0201 + * @tc.func : SetParameter + * @tc.desc : parameter.renderIntent != RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC + */ +HWTEST_F(MGModuleTest, SetParameter_0201, TestSize.Level1) +{ + auto plugin = MetadataGenerator::Create(); + ASSERT_NE(nullptr, plugin); 
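+    // A default-constructed MetadataGeneratorParameter (renderIntent left at its initial value) should still be accepted by SetParameter().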
+ + MetadataGeneratorParameter parameterSet; + int32_t ret = plugin->SetParameter(parameterSet); + ASSERT_EQ(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0202 + * @tc.func : SetParameter + * @tc.desc : parameter.renderIntent = RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC + */ +HWTEST_F(MGModuleTest, SetParameter_0202, TestSize.Level1) +{ + auto plugin = MetadataGenerator::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); +} + +/** + * @tc.number : 0301 + * @tc.func : GetParameter + * @tc.desc : Call after Create + */ +HWTEST_F(MGModuleTest, GetParameter_0301, TestSize.Level1) +{ + auto plugin = MetadataGenerator::Create(); + ASSERT_NE(nullptr, plugin); + + MetadataGeneratorParameter parameterGet; + int32_t ret = plugin->GetParameter(parameterGet); + ASSERT_EQ(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0302 + * @tc.func : GetParameter + * @tc.desc : Call after SetParameter + */ +HWTEST_F(MGModuleTest, GetParameter_0302, TestSize.Level1) +{ + auto plugin = MetadataGenerator::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + MetadataGeneratorParameter parameterGet; + int32_t ret = plugin->GetParameter(parameterGet); + ASSERT_EQ(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0401 + * @tc.func : Process + * @tc.desc : Call after Create + */ +HWTEST_F(MGModuleTest, Process_0401, TestSize.Level1) +{ + auto plugin = MetadataGenerator::Create(); + ASSERT_NE(nullptr, plugin); + + int32_t ret = plugin->Process(SurfaceBuffer::Create()); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0402 + * @tc.func : Process + * @tc.desc : Call after SetParameter, input is null + */ +HWTEST_F(MGModuleTest, Process_0402, TestSize.Level1) +{ + auto plugin = MetadataGenerator::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + int32_t ret = plugin->Process(nullptr); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} + +/** + * @tc.number : 0404 + * @tc.func : Process + * @tc.desc : Call after SetParameter, input is not null + */ +HWTEST_F(MGModuleTest, Process_0404, TestSize.Level1) +{ + auto plugin = MetadataGenerator::Create(); + ASSERT_NE(nullptr, plugin); + + SetParameter(plugin); + + auto input = CreateSurfaceBuffer(); + ASSERT_NE(nullptr, input); + + int32_t ret = plugin->Process(input); + ASSERT_NE(VPE_ALGO_ERR_OK, ret); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/test/moduletest/metadata_generator_video/BUILD.gn b/test/moduletest/metadata_generator_video/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..1e0b7e35112036c4ab7bb42352826c1a7d064022 --- /dev/null +++ b/test/moduletest/metadata_generator_video/BUILD.gn @@ -0,0 +1,59 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
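+# Module test target for MetadataGeneratorVideo; api_test.cpp, state_test.cpp and demo_test.cpp exercise the surface-based video metadata pipeline.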
+ +import("//build/ohos.gni") +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_moduletest("metadata_generator_video_module_test") { + module_out_path = MODULE_TEST_OUTPUT_PATH + + include_dirs = [ + ".", + "$INTERFACES_INNER_API_DIR", + "$DFX_DIR/include", + "//foundation/graphic/graphic_2d/interfaces/inner_api", + "$METADATA_GENERATOR_VIDEO_DIR/include", + ] + + defines = [] + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + cflags_cc = cflags + cflags_cc += [ "-std=c++17" ] + + sources = [ + "api_test.cpp", + "state_test.cpp", + "demo_test.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine" + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "av_codec:av_codec_client", + "hilog:libhilog", + "hitrace:hitrace_meter", + "drivers_interface_display:display_commontype_idl_headers", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/moduletest/metadata_generator_video/api_test.cpp b/test/moduletest/metadata_generator_video/api_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..328c2e61eb7c3466d3e76f8dd079fc0156ce4163 --- /dev/null +++ b/test/moduletest/metadata_generator_video/api_test.cpp @@ -0,0 +1,459 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include "metadata_generator_video.h" +#include "metadata_generator_video_common.h" +#include "metadata_generator_video_impl.h" +#include "algorithm_errors.h" +#include "media_description.h" +#include "algorithm_common.h" +#include "surface/window.h" +#include "external_window.h" +#include "securec.h" + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; +constexpr uint32_t DEFAULT_WIDTH = 1920; +constexpr uint32_t DEFAULT_HEIGHT = 1080; +constexpr uint32_t DEFAULT_BYTE = 32; +using namespace testing::ext; +using namespace OHOS; +using namespace OHOS::Media; +using namespace std; +using namespace OHOS::Media::VideoProcessingEngine; +using namespace OHOS::HDI::Display::Graphic::Common::V1_0; +namespace { +class CscVCB : public MetadataGeneratorVideoCallback { +public: + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, MdgBufferFlag flag) override; +}; +void CscVCB::OnOutputBufferAvailable(uint32_t index, MdgBufferFlag flag) +{ + (void)index; + (void)flag; +} +void CscVCB::OnError(int32_t errorCode) +{ + (void)errorCode; +} +void CscVCB::OnState(int32_t state) +{ + (void)state; +} + +constexpr CM_ColorSpaceInfo COLORSPACE_INFO_HDR_BT2020_HLG = { + COLORPRIMARIES_BT2020, + TRANSFUNC_HLG, + MATRIX_BT2020, + RANGE_LIMITED +}; + +class CSCVInnerApiTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp() + { + mdg_ = MetadataGeneratorVideo::Create(); + cscvCb_ = std::make_shared(); + }; + void TearDown() + { + if (mdg_) + mdg_->Release(); + }; + sptr surface; + OHNativeWindow *nativeWindow; + BufferFlushConfig flushCfg_{}; + BufferRequestConfig requestCfg_{}; + uint32_t FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer); + GSError SetMeatadata(sptr &buffer, uint32_t value); + GSError SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo); + void InitBufferConfig(); + std::shared_ptr mdg_ = nullptr; + std::shared_ptr cscvCb_ = nullptr; +}; + +int64_t GetSystemTimeUs() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = reinterpret_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + + return nanoTime / NANOS_IN_MICRO; +} + +GSError CSCVInnerApiTest::SetMeatadata(sptr &buffer, uint32_t value) +{ + std::vector metadata; + metadata.resize(sizeof(value)); + (void)memcpy_s(metadata.data(), metadata.size(), &value, sizeof(value)); + GSError err = buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, metadata); + return err; +} + +GSError CSCVInnerApiTest::SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo) +{ + std::vector metadata; + metadata.resize(sizeof(CM_ColorSpaceInfo)); + (void)memcpy_s(metadata.data(), metadata.size(), &colorspaceInfo, sizeof(CM_ColorSpaceInfo)); + GSError err = buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, metadata); + return err; +} + +void CSCVInnerApiTest::InitBufferConfig() +{ + requestCfg_.usage = + BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER | BUFFER_USAGE_HW_TEXTURE; + requestCfg_.width = DEFAULT_WIDTH; + requestCfg_.height = DEFAULT_HEIGHT; + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = DEFAULT_BYTE; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = DEFAULT_WIDTH; + flushCfg_.damage.h = DEFAULT_HEIGHT; + flushCfg_.timestamp = 0; +} + +uint32_t CSCVInnerApiTest::FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer) +{ + struct Region 
region; + struct Region::Rect *rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = DEFAULT_WIDTH; + rect->h = DEFAULT_HEIGHT; + region.rects = rect; + NativeWindowHandleOpt(nativeWindow, SET_UI_TIMESTAMP, GetSystemTimeUs()); + int32_t err = OH_NativeWindow_NativeWindowFlushBuffer(nativeWindow, ohNativeWindowBuffer, -1, region); + delete rect; + if (err != 0) { + cout << "FlushBuffer failed" << endl; + return 1; + } + return 0; +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0010 + * @tc.name : release output buffer api with illegal parameter + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0010, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr mdg2; + sptr surface2; + ret = mdg_->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + surface = mdg_->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + mdg2 = MetadataGeneratorVideo::Create(); + surface2 = mdg2->CreateInputSurface(); + ret = mdg_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->ReleaseOutputBuffer(100000, true); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.number : CSCV_API_0020 + * @tc.name : SetParameter test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0020, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr mdg2; + sptr surface2; + ret = mdg_->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + surface = mdg_->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + mdg2 = MetadataGeneratorVideo::Create(); + surface2 = mdg2->CreateInputSurface(); + ret = mdg_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + + ret = mdg_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} +/** + * @tc.number : CSCV_API_0030 + * @tc.name : NotifyEOS test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0030, TestSize.Level2) +{ + int32_t ret = 0; + OHNativeWindowBuffer *ohNativeWindowBuffer; + ret = mdg_->SetCallback(cscvCb_); + surface = mdg_->CreateInputSurface(); + std::shared_ptr mdg2 = MetadataGeneratorVideo::Create(); + sptr surface2 = mdg2->CreateInputSurface(); + ret = mdg_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Start(); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0040 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0040, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr cscvImpl; + cscvImpl = 
make_shared(); + ret = mdg_->SetCallback(cscvCb_); + sptr surface = mdg_->CreateInputSurface(); + ret = cscvImpl->Init(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscvImpl->SetOutputSurface(surface); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscvImpl->OnProducerBufferReleased(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0050 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0050, TestSize.Level2) +{ + int32_t ret = 0; + OHNativeWindowBuffer *ohNativeWindowBuffer; + ret = mdg_->SetCallback(cscvCb_); + surface = mdg_->CreateInputSurface(); + std::shared_ptr mdg2 = MetadataGeneratorVideo::Create(); + sptr surface2 = mdg2->CreateInputSurface(); + ret = mdg_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Start(); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->ReleaseOutputBuffer(0, true); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0060 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0060, TestSize.Level2) +{ + int32_t ret = 0; + ret = mdg_->SetCallback(cscvCb_); + sptr surface1 = mdg_->CreateInputSurface(); + std::shared_ptr mdg2; + mdg2 = MetadataGeneratorVideo::Create(); + sptr surface2 = mdg2->CreateInputSurface(); + ret = mdg_->SetOutputSurface(surface1); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->SetOutputSurface(surface1); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->ReleaseOutputBuffer(0, 0); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0061 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0061, TestSize.Level2) +{ + int32_t ret = 0; + std::queue> AppInBufferAvilQue; + sptr buffer; + sptr buffer2; + int32_t fence = -1; + requestCfg_.format = GRAPHIC_PIXEL_FMT_YCBCR_P010; + InitBufferConfig(); + sptr surface1 = mdg_->CreateInputSurface(); + std::shared_ptr mdg2; + mdg2 = MetadataGeneratorVideo::Create(); + sptr surface2 = mdg2->CreateInputSurface(); + ret = mdg_->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + GSError err = surface1->RequestBuffer(buffer, fence, 
requestCfg_); + ASSERT_EQ(err, GSERROR_OK); + AppInBufferAvilQue.push(buffer); + err = surface1->FlushBuffer(buffer, -1, flushCfg_); + ASSERT_EQ(err, GSERROR_OK); + ret = mdg_->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + mdg2->Release(); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0062 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0062, TestSize.Level2) +{ + int32_t ret = 0; + std::queue> AppInBufferAvilQue; + sptr buffer; + sptr buffer2; + int32_t fence = -1; + requestCfg_.format = GRAPHIC_PIXEL_FMT_YCBCR_P010; + InitBufferConfig(); + sptr surface1 = mdg_->CreateInputSurface(); + std::shared_ptr mdg2; + mdg2 = MetadataGeneratorVideo::Create(); + sptr surface2 = mdg2->CreateInputSurface(); + std::shared_ptr mdg3; + mdg3 = MetadataGeneratorVideo::Create(); + sptr surface3 = mdg3->CreateInputSurface(); + ret = mdg_->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + GSError err = surface1->RequestBuffer(buffer, fence, requestCfg_); + ASSERT_EQ(err, GSERROR_OK); + AppInBufferAvilQue.push(buffer); + err = surface1->FlushBuffer(buffer, -1, flushCfg_); + ASSERT_EQ(err, GSERROR_OK); + ret = mdg_->SetOutputSurface(surface3); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + mdg2->Release(); + mdg3->Release(); +} + +/** + * @tc.number : CSCVInnerApiTest_CSCV_API_0063 + * @tc.name : OnProducerBufferReleased api test + * @tc.desc : function test + */ +HWTEST_F(CSCVInnerApiTest, CSCV_API_0063, TestSize.Level2) +{ + int32_t ret = 0; + GSError err = GSERROR_OK; + std::queue> AppInBufferAvilQue; + sptr buffer; + sptr buffer2; + int32_t fence = -1; + CM_ColorSpaceInfo inColspcInfo = COLORSPACE_INFO_HDR_BT2020_HLG; + CM_HDR_Metadata_Type inMetaType = CM_METADATA_NONE; + requestCfg_.format = GRAPHIC_PIXEL_FMT_YCBCR_P010; + InitBufferConfig(); + sptr surface1 = mdg_->CreateInputSurface(); + std::shared_ptr mdg2; + mdg2 = MetadataGeneratorVideo::Create(); + sptr surface2 = mdg2->CreateInputSurface(); + std::shared_ptr mdg3; + mdg3 = MetadataGeneratorVideo::Create(); + sptr surface3 = mdg3->CreateInputSurface(); + mdg_->SetCallback(cscvCb_); + mdg_->SetOutputSurface(surface2); + mdg_->Configure(); + mdg_->Prepare(); + ret = mdg_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + for (int i = 0; i < 3; i++) { + sptr surfaceTmp = (i % 2) ? 
surface2 : surface3; + err = surface1->RequestBuffer(buffer, fence, requestCfg_); + ASSERT_EQ(err, GSERROR_OK); + err = SetMeatadata(buffer, inColspcInfo); + ASSERT_EQ(err, GSERROR_OK); + err = SetMeatadata(buffer, (uint32_t)inMetaType); + ASSERT_EQ(err, GSERROR_OK); + AppInBufferAvilQue.push(buffer); + err = surface1->FlushBuffer(buffer, -1, flushCfg_); + ASSERT_EQ(err, GSERROR_OK); + ret = mdg_->SetOutputSurface(surfaceTmp); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg_->NotifyEos(); + sleep(2); + } + mdg2->Release(); + mdg3->Release(); +} +} \ No newline at end of file diff --git a/test/moduletest/metadata_generator_video/demo_test.cpp b/test/moduletest/metadata_generator_video/demo_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4e9c9589e978fb5a88156d73d8b022264fa660b9 --- /dev/null +++ b/test/moduletest/metadata_generator_video/demo_test.cpp @@ -0,0 +1,209 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include "metadata_generator_video.h" +#include "colorspace_converter_video_description.h" +#include "metadata_generator_video_common.h" +#include "v1_0/cm_color_space.h" +#include "v1_0/hdr_static_metadata.h" +#include "algorithm_errors.h" +#include "media_description.h" +#include "algorithm_common.h" +#include "surface/window.h" +#include "external_window.h" +#include "metadata_generator_video_impl.h" +#include "vpe_log.h" + +constexpr uint32_t DEFAULT_WIDTH = 3840; // 1920; +constexpr uint32_t DEFAULT_HEIGHT = 2160; // 1080; +using namespace testing::ext; +using namespace OHOS; +using namespace OHOS::Media; +using namespace std; +using namespace OHOS::Media::VideoProcessingEngine; +using namespace OHOS::HDI::Display::Graphic::Common::V1_0; +namespace { + +std::shared_ptr cscv_ = nullptr; +sptr outSurface_; +sptr surface2; +BufferRequestConfig requestCfg_{}; +sptr outputBuffer = nullptr; +class CscVCB : public MetadataGeneratorVideoCallback { +public: + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, MdgBufferFlag flag) override; +}; + +void CscVCB::OnOutputBufferAvailable(uint32_t index, MdgBufferFlag flag) +{ + (void)flag; + cscv_->ReleaseOutputBuffer(index, true); +} + +void CscVCB::OnError(int32_t errorCode) +{ + (void)errorCode; +} + +void CscVCB::OnState(int32_t state) +{ + (void)state; +} + +constexpr CM_ColorSpaceInfo COLORSPACE_INFO_HDR_BT2020 = { + COLORPRIMARIES_BT2020, // 2 + TRANSFUNC_HLG, // 1 + MATRIX_BT2020, // 2 + RANGE_LIMITED // 2 +}; + + +class MdgDemoTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp() + { + cscv_ = MetadataGeneratorVideo::Create(); + cscvCb_ = std::make_shared(); + }; + void TearDown() + { + if (cscv_) + cscv_->Release(); + }; + sptr surface; + OHNativeWindow *nativeWindow; + GSError SetMeatadata(sptr &buffer, uint32_t value); + GSError SetMeatadata(sptr 
&buffer, CM_ColorSpaceInfo &colorspaceInfo); + void OutputBufferAvailable(uint32_t index, MdgBufferFlag flag); + GSError OnProducerBufferReleased(); + std::shared_ptr cscvCb_ = nullptr; +}; + + +class ImageProcessBufferConsumerListener : public OHOS::IBufferConsumerListener { +public: + void OnBufferAvailable() override; +}; + +void ImageProcessBufferConsumerListener::OnBufferAvailable() +{ + sptr fence{nullptr}; + int64_t timestamp = 0; + OHOS::Rect damage; + outSurface_->AcquireBuffer(outputBuffer, fence, timestamp, damage); +} + +GSError MdgDemoTest::SetMeatadata(sptr &buffer, uint32_t value) +{ + std::vector metadata; + metadata.resize(sizeof(value)); + (void)memcpy_s(metadata.data(), metadata.size(), &value, sizeof(value)); + GSError err = buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, metadata); + return err; +} + +GSError MdgDemoTest::SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo) +{ + std::vector metadata; + metadata.resize(sizeof(CM_ColorSpaceInfo)); + (void)memcpy_s(metadata.data(), metadata.size(), &colorspaceInfo, sizeof(CM_ColorSpaceInfo)); + GSError err = buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, metadata); + return err; +} + +void MdgDemoTest::OutputBufferAvailable(uint32_t index, MdgBufferFlag flag) +{ + (void)flag; + cscv_->ReleaseOutputBuffer(index, true); +} + +GSError MdgDemoTest::OnProducerBufferReleased() +{ + sptr buffertmp; + sptr fencetmp{nullptr}; + surface2->RequestBuffer(buffertmp, fencetmp, requestCfg_); + return GSERROR_OK; +} + +HWTEST_F(MdgDemoTest, DEMO_RUN_TEST, TestSize.Level1) +{ + BufferFlushConfig flushCfg_{}; + sptr buffer; + // input SDR + CM_ColorSpaceInfo inColspcInfo = COLORSPACE_INFO_HDR_BT2020; + CM_HDR_Metadata_Type inMetaType = CM_VIDEO_HDR_VIVID; + GraphicPixelFormat surfacePixelFmt = GRAPHIC_PIXEL_FMT_YCBCR_P010; + requestCfg_.format = surfacePixelFmt; + requestCfg_.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE + | BUFFER_USAGE_HW_RENDER | BUFFER_USAGE_HW_TEXTURE; + requestCfg_.width = DEFAULT_WIDTH; + requestCfg_.height = DEFAULT_HEIGHT; + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = 16; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = DEFAULT_WIDTH; + flushCfg_.damage.h = DEFAULT_HEIGHT; + flushCfg_.timestamp = 0; + sptr surface1 = cscv_->CreateInputSurface(); + outSurface_ = Surface::CreateSurfaceAsConsumer("ConvertInputSurface2"); + sptr listener = new ImageProcessBufferConsumerListener(); + outSurface_->RegisterConsumerListener(listener); + sptr producer = outSurface_->GetProducer(); + surface2 = Surface::CreateSurfaceAsProducer(producer); + outSurface_->SetQueueSize(5); + surface2->RegisterReleaseListener([this](sptr &buffer) { + (void)buffer; + return OnProducerBufferReleased(); + }); + ASSERT_EQ(cscv_->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->SetOutputSurface(surface2), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Start(), VPE_ALGO_ERR_OK); + int frames = 10; // 10 + while (frames) { + sptr fence{nullptr}; + sptr buffer2; + if (frames == 2) { // 2 change width + requestCfg_.width = 1920; // 1920 + } + if (frames == 5) { // 5 change height + requestCfg_.height = 1080; // 1080 + } + if (frames == 8) { // 8 change format + requestCfg_.format = GRAPHIC_PIXEL_FMT_RGBA_1010102; + } + surface1->RequestBuffer(buffer2, fence, requestCfg_); + SetMeatadata(buffer2, inColspcInfo); + SetMeatadata(buffer2, (uint32_t)inMetaType); + surface1->FlushBuffer(buffer2, -1, flushCfg_); 
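The two SetMeatadata() overloads above share one pattern: a trivially copyable value is serialized into a std::vector<uint8_t> with memcpy_s and then attached to the SurfaceBuffer under an ATTRKEY_* key. A minimal standalone sketch of that packing step follows; PackMetadata is an illustrative name, not part of the tested interface, and plain memcpy stands in for the bounds-checked memcpy_s used in the tests.

#include <cstdint>
#include <cstring>
#include <type_traits>
#include <vector>

// Serialize any flat metadata value (e.g. a CM_ColorSpaceInfo struct or a
// CM_HDR_Metadata_Type enum cast to uint32_t) into the byte vector that
// SurfaceBuffer::SetMetadata() expects.
template <typename T>
std::vector<uint8_t> PackMetadata(const T &value)
{
    static_assert(std::is_trivially_copyable<T>::value, "metadata must be a flat struct");
    std::vector<uint8_t> blob(sizeof(T));
    std::memcpy(blob.data(), &value, sizeof(T));   // memcpy_s in the production helpers
    return blob;
}

// Hypothetical usage mirroring the helpers above:
//   buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, PackMetadata(colorspaceInfo));
//   buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, PackMetadata(static_cast<uint32_t>(metaType)));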
+ frames--; + outSurface_->ReleaseBuffer(outputBuffer, -1); + usleep(1 * 100 * 1000); + } + ASSERT_EQ(cscv_->NotifyEos(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(cscv_->Release(), VPE_ALGO_ERR_OK); +} + +} \ No newline at end of file diff --git a/test/moduletest/metadata_generator_video/state_test.cpp b/test/moduletest/metadata_generator_video/state_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..af3afd1e036b181de4e8a4120f7a8122e418aab3 --- /dev/null +++ b/test/moduletest/metadata_generator_video/state_test.cpp @@ -0,0 +1,706 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include +#include +#include +#include +#include +#include + +#include "metadata_generator_video.h" +#include "metadata_generator_video_common.h" +#include "algorithm_common.h" +#include "algorithm_errors.h" + +using namespace testing::ext; +using namespace OHOS; +using namespace OHOS::Media; +using namespace Media::VideoProcessingEngine; +using namespace OHOS::HDI::Display::Graphic::Common::V1_0; +using namespace std; +namespace { +const std::string DEFAULT_FILE = "/data/test/media/1080p_Vivid.mp4"; + +enum CSCV_API_NAME { + SETCALLBACK = 1, + CREATEINPUTSURFACE, + SETOUTPUTSURFACE, + CONFIGURE, +}; + +uint32_t g_errorCount = 0; +class CscVCB : public MetadataGeneratorVideoCallback { +public: + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, MdgBufferFlag flag) override; +}; +void CscVCB::OnOutputBufferAvailable(uint32_t index, MdgBufferFlag flag) +{ + (void)index; + (void)flag; +} +void CscVCB::OnError(int32_t errorCode) +{ + (void)errorCode; + g_errorCount++; +} +void CscVCB::OnState(int32_t state) +{ + (void)state; +} + +class MDGInnerStateTest : public testing::Test { +public: + static void TearDownTestCase(void) {}; + void SetUp() + { + mdg = MetadataGeneratorVideo::Create(); + cscvCb_ = std::make_shared(); + g_errorCount = 0; + }; + void TearDown() + { + if (mdg) { + mdg->Release(); + mdg = nullptr; + } + }; + + void PrepareFunc(int num); + void AllPrepareFunc(); + + std::shared_ptr cscvCb_ = nullptr; + std::shared_ptr mdg = nullptr; +}; + +void MDGInnerStateTest::PrepareFunc(int num) +{ + int32_t ret = 0; + std::shared_ptr mdg2; + sptr surface2; + sptr surface; + switch (num) { + case SETCALLBACK: + ret = mdg->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + break; + case CREATEINPUTSURFACE: + surface = mdg->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + break; + case SETOUTPUTSURFACE: + mdg2 = MetadataGeneratorVideo::Create(); + surface2 = mdg2->CreateInputSurface(); + ret = mdg->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + break; + case CONFIGURE: + ret = mdg->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + break; + default: + break; + } +} + +void MDGInnerStateTest::AllPrepareFunc() +{ + int32_t ret = 0; + std::shared_ptr mdg2; + sptr surface2; 
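PrepareFunc() above exercises the four setup calls one at a time so the permutation test can reorder them, while AllPrepareFunc() runs the whole set in the canonical order. A compact sketch of that happy-path ordering, assuming the MetadataGeneratorVideo inner-API signatures used throughout these tests (the helper name and the early-return style are illustrative):

#include <memory>

// Drive the full prepare sequence on one instance. A second instance is created
// only so its input surface can serve as this instance's output surface, exactly
// as the tests in this file do.
bool PrepareMetadataGenerator(const std::shared_ptr<MetadataGeneratorVideo> &mdg,
                              const std::shared_ptr<MetadataGeneratorVideoCallback> &cb)
{
    if (mdg->SetCallback(cb) != VPE_ALGO_ERR_OK) {        // 1. register callbacks first
        return false;
    }
    sptr<Surface> input = mdg->CreateInputSurface();       // 2. producer side handed to the caller
    if (input == nullptr) {
        return false;
    }
    auto peer = MetadataGeneratorVideo::Create();           // 3. peer instance supplies the output surface
    if (peer == nullptr) {
        return false;
    }
    if (mdg->SetOutputSurface(peer->CreateInputSurface()) != VPE_ALGO_ERR_OK) {
        return false;
    }
    return mdg->Configure() == VPE_ALGO_ERR_OK;             // 4. Configure completes the prepare set
}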
+ sptr surface; + ret = mdg->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + surface = mdg->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + mdg2 = MetadataGeneratorVideo::Create(); + surface2 = mdg2->CreateInputSurface(); + ret = mdg->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0010 + * @tc.name : call all combination of prepare-state func + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0010, TestSize.Level1) +{ + std::vector nums = {1, 2, 3, 4}; + int32_t ret = 0; + do { + ret = mdg->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + mdg = nullptr; + mdg = MetadataGeneratorVideo::Create(); + ASSERT_NE(mdg, nullptr); + for (int num:nums) + PrepareFunc(num); + ret = mdg->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + } while (std::next_permutation(nums.begin(), nums.end())); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0030 + * @tc.name : call start func without fully prepare(SetCallback) + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0030, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr mdg2 = MetadataGeneratorVideo::Create(); + ASSERT_NE(mdg2, nullptr); + sptr surface2 = mdg2->CreateInputSurface(); + ret = mdg->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + sptr surface = mdg->CreateInputSurface(); + ASSERT_NE(surface, nullptr); + ret = mdg->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_OPERATION); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0040 + * @tc.name : call start func without fully prepare(SetOutputSurface) + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0040, TestSize.Level2) +{ + int32_t ret = 0; + sptr surface = mdg->CreateInputSurface(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_OPERATION); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0050 + * @tc.name : call start func without fully prepare(CreateInputSurface) + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0050, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr mdg2 = MetadataGeneratorVideo::Create(); + sptr surface2 = mdg2->CreateInputSurface(); + ret = mdg->SetOutputSurface(surface2); + ret = mdg->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_OPERATION); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0060 + * @tc.name : call prepare/start/stop/notifyeos without initialize + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0060, TestSize.Level2) +{ + int32_t ret = 0; + ret = mdg->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->Stop(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0070 + * @tc.name : repeat call CreateInputSurface + * @tc.desc : state test + */ 
+HWTEST_F(MDGInnerStateTest, CSCV_STATE_0070, TestSize.Level1) +{ + ASSERT_NE(mdg->CreateInputSurface(), nullptr); + ASSERT_EQ(mdg->CreateInputSurface(), nullptr); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0080 + * @tc.name : repeat call SetOutputSurface + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0080, TestSize.Level1) +{ + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0090 + * @tc.name : repeat call SetCallback + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0090, TestSize.Level1) +{ + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0100 + * @tc.name : repeat call Configure with different value + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0100, TestSize.Level1) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(VPE_ALGO_ERR_OK, mdg->SetOutputSurface(producerSurface)); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0110 + * @tc.name : call configure -> start -> stop -> eos + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0110, TestSize.Level2) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = mdg->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->Stop(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0120 + * @tc.name : call configure -> reset -> configure -> prepare -> start ->eos ->release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0120, TestSize.Level1) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = mdg->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0130 + * @tc.name : call configure -> reset -> reset ->release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0130, TestSize.Level1) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = mdg->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0140 + * @tc.name : call configure -> reset ->release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0140, TestSize.Level1) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = mdg->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Release(); + ASSERT_EQ(ret, 
VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0150 + * @tc.name : call configure -> reset -> CreateInputSurface -> + SetOutputSurface -> SetCallback -> configure -> prepare ->start -> eos ->release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0150, TestSize.Level1) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = mdg->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + auto surface = mdg->CreateInputSurface(); + ASSERT_EQ(surface, nullptr); + std::shared_ptr mdg2 = MetadataGeneratorVideo::Create(); + sptr surface2 = mdg2->CreateInputSurface(); + ret = mdg->SetOutputSurface(surface2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->SetCallback(cscvCb_); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ASSERT_EQ(g_errorCount, 0); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0160 + * @tc.name : call configure -> reset -> prepare -> start -> stop -> eos + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0160, TestSize.Level2) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = mdg->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->Stop(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0170 + * @tc.name : call configure -> release ->(prepare-reset) + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0170, TestSize.Level2) +{ + int32_t ret = 0; + AllPrepareFunc(); + ret = mdg->Release(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = mdg->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->Stop(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); + ret = mdg->Reset(); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0180 + * @tc.name : repeat call start/stop in processing + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0180, TestSize.Level1) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Stop(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Stop(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0190 + * @tc.name : configure -> prepare -> start -> stop -> release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0190, TestSize.Level1) +{ + sptr surface = mdg->CreateInputSurface(); + sptr 
outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Release(), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0200 + * @tc.name : configure -> prepare -> start -> stop -> reset -> release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0200, TestSize.Level1) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Reset(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Release(), VPE_ALGO_ERR_OK); + mdg = nullptr; +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0210 + * @tc.name : configure -> prepare -> start -> stop -> (error)configure/prepare/stop/eos + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0210, TestSize.Level2) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Stop(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->NotifyEos(), VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0220 + * @tc.name : configure -> prepare -> start -> eos -> release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0220, TestSize.Level1) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->NotifyEos(), VPE_ALGO_ERR_OK); + 
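Taken together, the expected return codes in these state tests sketch a small state machine: Configure and Prepare are accepted only from a freshly created or stopped instance, Start only after Prepare (or again after Stop), NotifyEos only while running, and nothing but Release is accepted once the instance has been released. The helper below is an illustrative reconstruction inferred purely from the asserts in this file, not the engine's actual implementation:

#include <string>

enum class MdgState { Created, Configured, Prepared, Running, Stopped, Eos, Released };

// Return whether a call is expected to succeed from a given state, as implied by
// the VPE_ALGO_ERR_OK / VPE_ALGO_ERR_INVALID_STATE expectations in this file.
bool CallIsLegal(MdgState from, const std::string &call)
{
    if (call == "Configure") return from == MdgState::Created || from == MdgState::Stopped;
    if (call == "Prepare")   return from == MdgState::Configured || from == MdgState::Stopped;
    if (call == "Start")     return from == MdgState::Prepared || from == MdgState::Stopped;
    if (call == "Stop")      return from == MdgState::Running || from == MdgState::Eos;
    if (call == "NotifyEos") return from == MdgState::Running;
    if (call == "Reset")     return from != MdgState::Released;
    if (call == "Release")   return from != MdgState::Released;
    return false;
}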
ASSERT_EQ(mdg->Release(), VPE_ALGO_ERR_OK); + mdg = nullptr; +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0230 + * @tc.name : configure -> prepare -> start -> eos -> reset -> release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0230, TestSize.Level1) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->NotifyEos(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Reset(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Release(), VPE_ALGO_ERR_OK); + mdg = nullptr; +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0240 + * @tc.name : configure -> prepare -> start -> eos -> (error)configure/prepare/start/stop/eos + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0240, TestSize.Level2) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->NotifyEos(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->NotifyEos(), VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0250 + * @tc.name : configure -> prepare -> start -> release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0250, TestSize.Level1) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Release(), VPE_ALGO_ERR_OK); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0260 + * @tc.name : configure -> prepare -> start -> reset ->release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0260, TestSize.Level1) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + 
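Every test in this file that needs a real output surface repeats the same three lines: create a consumer surface, take its producer handle, and wrap that handle as the producer-side Surface passed to SetOutputSurface(). A sketch of that boilerplate pulled into one helper; the struct and function names are illustrative, and sptr<IBufferProducer> is assumed to be the handle type returned by GetProducer():

#include <string>

// Keep both ends of the buffer queue: the consumer must stay alive for as long
// as the producer side is registered with the instance under test.
struct SurfacePair {
    sptr<Surface> consumer;   // owned by the test, receives the processed buffers
    sptr<Surface> producer;   // handed to MetadataGeneratorVideo::SetOutputSurface()
};

SurfacePair MakeSurfacePair(const std::string &name)
{
    SurfacePair pair;
    pair.consumer = Surface::CreateSurfaceAsConsumer(name);
    if (pair.consumer != nullptr) {
        sptr<IBufferProducer> producer = pair.consumer->GetProducer();
        pair.producer = Surface::CreateSurfaceAsProducer(producer);
    }
    return pair;
}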
ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->Reset(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Release(), VPE_ALGO_ERR_OK); + mdg = nullptr; +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0270 + * @tc.name : configure -> prepare -> start -> configure + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0270, TestSize.Level1) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + sleep(1); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_INVALID_STATE); +} + +/** + * @tc.number : CSCVInnerStateTest_CSCV_STATE_0280 + * @tc.name : configure -> prepare -> (error)configure/stop/eos/reset/release + * @tc.desc : state test + */ +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0280, TestSize.Level2) +{ + sptr surface = mdg->CreateInputSurface(); + sptr outSurface = Surface::CreateSurfaceAsConsumer("ConvertInputSurface"); + sptr producer = outSurface->GetProducer(); + sptr producerSurface = Surface::CreateSurfaceAsProducer(producer); + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Prepare(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Start(), VPE_ALGO_ERR_OK); + + ASSERT_EQ(mdg->SetOutputSurface(producerSurface), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->SetCallback(cscvCb_), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->Configure(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->Stop(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->NotifyEos(), VPE_ALGO_ERR_INVALID_STATE); + ASSERT_EQ(mdg->Reset(), VPE_ALGO_ERR_OK); + ASSERT_EQ(mdg->Release(), VPE_ALGO_ERR_OK); + mdg = nullptr; +} + +} \ No newline at end of file diff --git a/test/nativedemo/vpe_demo/BUILD.gn b/test/nativedemo/vpe_demo/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..394d6db7f673a433fedb2072d00fb4d781777fac --- /dev/null +++ b/test/nativedemo/vpe_demo/BUILD.gn @@ -0,0 +1,93 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/ohos.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_executable("vpe_demo") { + install_enable = false + + include_dirs = [ + "./include", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/ColorSpaceConverter/sample" + ] + + defines = [] + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + cflags_cc = cflags + cflags_cc += [ "-std=c++17" ] + + sources = [ + "vpe_demo.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$TEST_UTILS_PATH/ColorSpaceConverter/sample:csc_test_utils" + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "drivers_interface_display:display_commontype_idl_headers", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} + +ohos_executable("detailEnh_demo") { + install_enable = false + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + include_dirs = [ + "./include", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/DetailEnhancer/sample", + "$TEST_UTILS_PATH/DetailEnhancer/sample", + "//foundation/multimedia/media_foundation/interface/inner_api/meta" + ] + + defines = [] + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + cflags_cc = cflags + cflags_cc += [ "-std=c++17" ] + + sources = [ + "detail_enhancer_demo.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$TEST_UTILS_PATH/DetailEnhancer/sample:detailEnh_test_utils" + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "drivers_interface_display:display_commontype_idl_headers", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/nativedemo/vpe_demo/detail_enhancer_demo.cpp b/test/nativedemo/vpe_demo/detail_enhancer_demo.cpp new file mode 100644 index 0000000000000000000000000000000000000000..86b68415888ebb99ff281262c0cbd606b8df95d0 --- /dev/null +++ b/test/nativedemo/vpe_demo/detail_enhancer_demo.cpp @@ -0,0 +1,263 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "DetailEnhDemo" + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "algorithm_common.h" +#include "algorithm_errors.h" +#include "graphic_common_c.h" +#include "detailEnh_sample.h" +#include "detailEnh_sample_define.h" +#include "detail_enhancer_image.h" + +using namespace OHOS; +using namespace Media; +using namespace VideoProcessingEngine; + +namespace { +const float SIZE_COEF_YUV420 = 1.5; +const float SIZE_COEF_RGBA8888 = 4; +const float SIZE_COEF_YUV444 = 3; + +std::shared_ptr DetailEnhancerImageCreate() +{ + auto detailEnh = DetailEnhancerImage::Create(); + if (detailEnh == nullptr) { + TEST_LOG("Create Detail enhancer failed"); + return nullptr; + } + return detailEnh; +} + +std::string GetFormatName(int32_t format) +{ + std::string formatName = "UNKNOWN"; + switch (format) { + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP: + formatName = "NV12"; + break; + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P: + formatName = "I420"; + break; + case OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888: + formatName = "RGBA"; + break; + case OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888: + formatName = "BGRA"; + break; + case OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102: + formatName = "RGBA10"; + break; + default: + TEST_LOG("Unknow format!"); + } + return formatName; +} + +int32_t GetFileSize(int32_t width, int32_t height, int32_t format) +{ + int32_t size = width * height; + switch (format) { + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP: + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P: + size = size * SIZE_COEF_YUV420; + break; + case OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888: + case OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888: + case OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102: + size *= SIZE_COEF_RGBA8888; + break; + default: + TEST_LOG("Unknow format:%d", format); + size *= SIZE_COEF_YUV444; + break; + } + return size; +} + +int32_t GetImageType(int32_t format) +{ + int32_t imageType = 0; + switch (format) { + case RGBA: + imageType = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + break; + case BGRA: + imageType = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + break; + case NV12: + imageType = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + break; + case I420: + imageType = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + break; + case RGBA1010102: + imageType = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + break; + default: + imageType = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + } + return imageType; +} + +void Process(std::shared_ptr detailEnh, std::string_view inputFile, + std::vector inputParam, std::vector outputParam, bool needDump) +{ + if (inputFile == "UNKNOWN") { + TEST_LOG("Invalid input"); + return; + } + int32_t inputFormat = inputParam[0]; + int32_t inputWidth = inputParam[1]; + int32_t inputHeight = inputParam[2]; + int32_t outputFormat = outputParam[0]; + int32_t outputWidth = outputParam[1]; + int32_t outputHeight = outputParam[2]; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + TEST_LOG("inputFile:%s", inputFile.data()); + std::unique_ptr yuvFile = + std::make_unique(inputFile.data(), std::ios::binary | std::ios::in); + ReadYuvFile(input, yuvFile, GetFileSize(inputWidth, inputHeight, inputFormat)); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + yuvFile->seekg(0); + yuvFile->close(); + + int32_t ret = detailEnh->Process(input, output); + if (ret != VPE_ALGO_ERR_OK) { + TEST_LOG("Processed failed"); + return; + } + if (needDump) { + std::unique_ptr outputImage = std::make_unique( + "/data/test/media/output/" + GetFormatName(inputFormat) + "To" + 
GetFormatName(outputFormat) + "_" + + std::to_string(outputWidth) + "x" + std::to_string(outputHeight) + "_" + + std::to_string(output->GetStride()) + ".yuv", + std::ios::binary | std::ios::out | std::ios::trunc); + outputImage->write(static_cast(output->GetVirAddr()), output->GetSize()); + outputImage->close(); + } +} + +void RunWithSo(sptr & input, sptr & output, DetailEnhancerLevel level) +{ + void* lib = dlopen("/system/lib64/libvideoprocessingengine.z.so", RTLD_LAZY); + if (lib == nullptr) { + printf("cannot load vpe lib\n"); + return; + } + + typedef int32_t (*DetailEnhancerCreate)(int32_t*); + typedef int32_t (*DetailEnhancerProcessImage)(int32_t, + OHNativeWindowBuffer*, OHNativeWindowBuffer*, int32_t); + typedef int32_t (*DetailEnhancerDestroy)(int32_t*); + + auto detailEnhCreate = reinterpret_cast(dlsym(lib, "DetailEnhancerCreate")); + auto detailEnhProcessImage = + reinterpret_cast(dlsym(lib, "DetailEnhancerProcessImage")); + auto detailEnhDestroy = reinterpret_cast(dlsym(lib, "DetailEnhancerDestroy")); + + int32_t instanceSrId = -1; + int32_t res = detailEnhCreate(&instanceSrId); + if (res != 0 || instanceSrId == -1) { + TEST_LOG("create failed, res:%d, insta:%d\n", res, instanceSrId); + dlclose(lib); + return; + } + if (input == nullptr || output == nullptr) { + TEST_LOG("input invalid\n"); + return; + } + OHNativeWindowBuffer* srIn = OH_NativeWindow_CreateNativeWindowBufferFromSurfaceBuffer(&input); + OHNativeWindowBuffer* srOut = OH_NativeWindow_CreateNativeWindowBufferFromSurfaceBuffer(&output); + res = detailEnhProcessImage(instanceSrId, srIn, srOut, static_cast(level)); + if (res != 0) { + TEST_LOG("process failed\n"); + return; + } + res = detailEnhDestroy(&instanceSrId); + if (res != 0) { + TEST_LOG("destroy failed\n"); + return; + } + std::unique_ptr outputImage = std::make_unique( + "/data/test/media/output/" + GetFormatName(input->GetFormat()) + "To" + GetFormatName(output->GetFormat()) + + "_" + std::to_string(output->GetWidth()) + "x" + std::to_string(output->GetHeight())+ "_" + + std::to_string(output->GetStride()) + "_level"+ std::to_string(level) + "externC.yuv", + std::ios::binary | std::ios::out | std::ios::trunc); + outputImage->write(static_cast(output->GetVirAddr()), output->GetSize()); + outputImage->close(); +} +} + +int32_t main([[maybe_unused]]int argc, char* argv[]) +{ + TEST_LOG("USAGE exe inputWidth inputHeight outputWidth outputHeight pixelFormat\ + checkHightLevel checkSuperLevel checkDlopen needDump processTime enterTime\n"); + TEST_LOG("format: 5 <---> RGBA8888, 6 <---> NV12, 7 <---> YUV420, 8 <---> BGRA 9 <---> RGBA1010102\n"); + int32_t inputWidth = atoi(argv[1]); + int32_t inputHeight = atoi(argv[2]); + int32_t outputWidth = atoi(argv[3]); + int32_t outputHeight = atoi(argv[4]); + SUPPORT_FORMAT pixelFormat = static_cast(atoi(argv[5])); + bool checkDlopen = atoi(argv[6]); + bool needDump = atoi(argv[7]); + int32_t levelToProcess = atoi(argv[8]); + std::string inputFilePath = argv[9]; + int32_t processTime = atoi(argv[10]); + int32_t enterTime = atoi(argv[11]); + + if (!checkDlopen) { + for (int i = 0; i < enterTime; i++) { + auto detailEnh = DetailEnhancerImageCreate(); + if (detailEnh == nullptr) { + printf("detailEnh == nullptr"); + return -1; + } + DetailEnhancerParameters param { + .uri = "", + .level = static_cast(levelToProcess), + .forceEve = 1, + }; + if (detailEnh->SetParameter(param)!= VPE_ALGO_ERR_OK) { + printf("Init failed!"); + return -1; + } + for (int j = 0; j < processTime; j++) { + Process(detailEnh, inputFilePath, { 
GetImageType(pixelFormat), inputWidth, inputHeight }, + { GetImageType(pixelFormat), outputWidth, outputHeight }, needDump); + } + detailEnh = nullptr; + } + } else { + auto input = CreateSurfaceBuffer(GetImageType(pixelFormat), inputWidth, inputHeight); + std::string_view inputFile = inputFilePath; + std::unique_ptr yuvFile = + std::make_unique(inputFile.data(), std::ios::binary | std::ios::in); + ReadYuvFile(input, yuvFile, GetFileSize(inputWidth, inputHeight, GetImageType(pixelFormat))); + auto output = CreateSurfaceBuffer(GetImageType(pixelFormat), outputWidth, outputHeight); + RunWithSo(input, output, static_cast(levelToProcess)); + yuvFile->close(); + } + return 0; +} diff --git a/test/nativedemo/vpe_demo/include/vpe_demo.h b/test/nativedemo/vpe_demo/include/vpe_demo.h new file mode 100644 index 0000000000000000000000000000000000000000..0cc95b1f2e1a235eb19af4bb664089ca95b705ef --- /dev/null +++ b/test/nativedemo/vpe_demo/include/vpe_demo.h @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VPE_DEMO_H +#define VPE_DEMO_H +#endif // VPE_DEMO_H \ No newline at end of file diff --git a/test/nativedemo/vpe_demo/vpe_demo.cpp b/test/nativedemo/vpe_demo/vpe_demo.cpp new file mode 100644 index 0000000000000000000000000000000000000000..de9b8104dea3ea5cc93b14f2ebacce0ca322af4e --- /dev/null +++ b/test/nativedemo/vpe_demo/vpe_demo.cpp @@ -0,0 +1,126 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include "csc_sample.h" +#include "csc_sample_define.h" +#include "graphic_common_c.h" +#include "algorithm_common.h" +#include "algorithm_errors.h" + +using namespace OHOS; +using namespace Media; +using namespace VideoProcessingEngine; +using namespace CSCSampleDefine; + +namespace { +struct DemoArg { + int32_t repeatTimes = 1; + bool needPrintMetadata = true; + bool needDumpOutputBuffer = false; +}; + +void GetCmdArg(int argc, char* argv[], DemoArg &demoArg) +{ + constexpr int32_t maxRepeatTimes = 1000; + constexpr int8_t needDumpOutputBufferOffset = 4; + constexpr int8_t needPrintMetadataOffset = 3; + constexpr int8_t repeatTimesOffset = 2; + switch (argc) { + case needDumpOutputBufferOffset: + demoArg.needDumpOutputBuffer = atoi(argv[needDumpOutputBufferOffset - 1]); + case needPrintMetadataOffset: + demoArg.needPrintMetadata = atoi(argv[needPrintMetadataOffset - 1]); + case repeatTimesOffset: + demoArg.repeatTimes = atoi(argv[repeatTimesOffset - 1]); + if (demoArg.repeatTimes > maxRepeatTimes || demoArg.repeatTimes < 1) { + printf("Demo repeats range: [1, %d], err input: %d, set 1\n", maxRepeatTimes, demoArg.repeatTimes); + demoArg.repeatTimes = 1; + } + default: + break; + } + printf("Dump output buffer: %s\n", demoArg.needDumpOutputBuffer ? "true" : "false"); + printf("Print metadata: %s\n", demoArg.needPrintMetadata ? "true" : "false"); + printf("Demo repeat times: %d\n", demoArg.repeatTimes); +} + +std::shared_ptr CreateColorSpaceConverter() +{ + auto csc = ColorSpaceConverter::Create(); + if (csc == nullptr) { + printf("Create colorspace converter failed"); + return nullptr; + } + ColorSpaceConverterParameter parameter; + parameter.renderIntent = RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC; + int32_t ret = csc->SetParameter(parameter); + printf("SetParameter %d\n", ret); + + return csc; +} +} + +int32_t main(int argc, char* argv[]) +{ + DemoArg demoArg; + GetCmdArg(argc, argv, demoArg); + + std::unique_ptr metadataFile = + std::make_unique(METADATA_FILE.data(), std::ios::binary | std::ios::in); + std::unique_ptr yuvFile = + std::make_unique(YUV_FILE.data(), std::ios::binary | std::ios::in); + auto csc = CreateColorSpaceConverter(); + + for (int times = 0; times < demoArg.repeatTimes; times++) { + std::vector> inputList(YUV_FILE_FRAME_NUM, + CreateSurfaceBuffer(YUV_FILE_PIXEL_FORMAT, WIDTH, HEIGHT)); + auto output = CreateSurfaceBuffer(OUTPUT_PIXEL_FORMAT, WIDTH, HEIGHT); + SetMeatadata(output, OUTPUT_COLORSPACE_INFO); + SetMeatadata(output, CM_METADATA_NONE); + + int frame = times * 2; + for (auto &input : inputList) { + SetMeatadata(input, INPUT_COLORSPACE_INFO); + SetMeatadata(input, CM_VIDEO_HDR_VIVID); + SetMeatadata(input, metadataFile); + ReadYuvFile(input, yuvFile, ONE_FRAME_SIZE); + + int32_t ret = csc->Process(input, output); + printf("frame: %4d, csc process, ret: %d\n", ++frame, ret); + + if (demoArg.needPrintMetadata) { + PrintMetadataType(input, ATTRKEY_COLORSPACE_TYPE); + PrintMetadataType(input, ATTRKEY_HDR_METADATA_TYPE); + PrintMetadataType(output, ATTRKEY_COLORSPACE_TYPE); + PrintMetadataType(output, ATTRKEY_HDR_METADATA_TYPE); + PrintMetadataKey(input); + PrintMetadataKey(output); + } + if (demoArg.needDumpOutputBuffer) { + std::unique_ptr outputImage = std::make_unique( + "frame_output_" + std::to_string(frame), std::ios::binary | std::ios::out | std::ios::trunc); + outputImage->write(static_cast(output->GetVirAddr()), output->GetSize()); + } + } + metadataFile->seekg(0); + yuvFile->seekg(0); + } 
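GetCmdArg() above parses its optional arguments with deliberate case fall-through: switching on argc and omitting break statements lets a call with N arguments also pick up every argument before the N-th. A minimal sketch of the same pattern with explicit [[fallthrough]] markers (the Options struct and ParseArgs name are illustrative); the targets in this diff already build with -std=c++17, which [[fallthrough]] requires:

#include <cstdlib>

struct Options {
    int repeatTimes = 1;         // argv[1]
    bool printMetadata = true;   // argv[2]
    bool dumpOutput = false;     // argv[3]
};

Options ParseArgs(int argc, char *argv[])
{
    Options opt;
    switch (argc) {
        case 4:
            opt.dumpOutput = std::atoi(argv[3]) != 0;
            [[fallthrough]];
        case 3:
            opt.printMetadata = std::atoi(argv[2]) != 0;
            [[fallthrough]];
        case 2:
            opt.repeatTimes = std::atoi(argv[1]);
            break;
        default:
            break;   // no extra arguments: keep the defaults
    }
    return opt;
}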
+ return 0; +} diff --git a/test/ndk/BUILD.gn b/test/ndk/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..07807777f20647c822801f41424419be5a0ba8fe --- /dev/null +++ b/test/ndk/BUILD.gn @@ -0,0 +1,31 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/ohos.gni") +import("//foundation/multimedia/media_foundation/video_processing_engine/config.gni") + +group("vpe_module_test") { + testonly = true + deps = [] + if (!use_libfuzzer) { + deps += [ "moduletest/video:vpe_video_native_module_test" ] + deps += [ "nativedemo:vpe_video_nativedemo" ] + } +} + +group("vpe_fuzz_test") { + testonly = true + deps = [] + + deps += [ "fuzztest/videometadata_fuzzer:VideoMetadataFuzzTest" ] +} \ No newline at end of file diff --git a/test/ndk/moduletest/common/enum_list.h b/test/ndk/moduletest/common/enum_list.h new file mode 100644 index 0000000000000000000000000000000000000000..3a498a04ac7c246c2d033b705214abaa26cecf43 --- /dev/null +++ b/test/ndk/moduletest/common/enum_list.h @@ -0,0 +1,189 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef ENUM_LIST_H +#define ENUM_LIST_H +#include "native_buffer.h" + +int32_t NativeBuffer_ColorSpace[] = { + OH_COLORSPACE_NONE, + OH_COLORSPACE_BT601_EBU_FULL, + OH_COLORSPACE_BT601_SMPTE_C_FULL, + OH_COLORSPACE_BT709_FULL, + OH_COLORSPACE_BT2020_HLG_FULL, + OH_COLORSPACE_BT2020_PQ_FULL, + OH_COLORSPACE_BT601_EBU_LIMIT, + OH_COLORSPACE_BT601_SMPTE_C_LIMIT, + OH_COLORSPACE_BT709_LIMIT, + OH_COLORSPACE_BT2020_HLG_LIMIT, + OH_COLORSPACE_BT2020_PQ_LIMIT, + OH_COLORSPACE_SRGB_FULL, + OH_COLORSPACE_P3_FULL, + OH_COLORSPACE_P3_HLG_FULL, + OH_COLORSPACE_P3_PQ_FULL, + OH_COLORSPACE_ADOBERGB_FULL, + OH_COLORSPACE_SRGB_LIMIT, + OH_COLORSPACE_P3_LIMIT, + OH_COLORSPACE_P3_HLG_LIMIT, + OH_COLORSPACE_P3_PQ_LIMIT, + OH_COLORSPACE_ADOBERGB_LIMIT, + OH_COLORSPACE_LINEAR_SRGB, + OH_COLORSPACE_LINEAR_BT709, + OH_COLORSPACE_LINEAR_P3, + OH_COLORSPACE_LINEAR_BT2020, + OH_COLORSPACE_DISPLAY_SRGB, + OH_COLORSPACE_DISPLAY_P3_SRGB, + OH_COLORSPACE_DISPLAY_P3_HLG, + OH_COLORSPACE_DISPLAY_P3_PQ, + OH_COLORSPACE_DISPLAY_BT2020_SRGB, + OH_COLORSPACE_DISPLAY_BT2020_HLG, + OH_COLORSPACE_DISPLAY_BT2020_PQ +}; + +int32_t NativeBuffer_Format[] = { + NATIVEBUFFER_PIXEL_FMT_CLUT8, + NATIVEBUFFER_PIXEL_FMT_CLUT1, + NATIVEBUFFER_PIXEL_FMT_CLUT4, + NATIVEBUFFER_PIXEL_FMT_RGB_565, + NATIVEBUFFER_PIXEL_FMT_RGBA_5658, + NATIVEBUFFER_PIXEL_FMT_RGBX_4444, + NATIVEBUFFER_PIXEL_FMT_RGBA_4444, + NATIVEBUFFER_PIXEL_FMT_RGB_444, + NATIVEBUFFER_PIXEL_FMT_RGBX_5551, + NATIVEBUFFER_PIXEL_FMT_RGBA_5551, + NATIVEBUFFER_PIXEL_FMT_RGB_555, + NATIVEBUFFER_PIXEL_FMT_RGBX_8888, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888, + NATIVEBUFFER_PIXEL_FMT_RGB_888, + NATIVEBUFFER_PIXEL_FMT_BGR_565, + NATIVEBUFFER_PIXEL_FMT_BGRX_4444, + NATIVEBUFFER_PIXEL_FMT_BGRA_4444, + NATIVEBUFFER_PIXEL_FMT_BGRX_5551, + NATIVEBUFFER_PIXEL_FMT_BGRA_5551, + NATIVEBUFFER_PIXEL_FMT_BGRX_8888, + NATIVEBUFFER_PIXEL_FMT_BGRA_8888, + NATIVEBUFFER_PIXEL_FMT_YUV_422_I, + NATIVEBUFFER_PIXEL_FMT_YCBCR_422_SP, + NATIVEBUFFER_PIXEL_FMT_YCRCB_422_SP, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, + NATIVEBUFFER_PIXEL_FMT_YCBCR_422_P, + NATIVEBUFFER_PIXEL_FMT_YCRCB_422_P, + NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P, + NATIVEBUFFER_PIXEL_FMT_YCRCB_420_P, + NATIVEBUFFER_PIXEL_FMT_YUYV_422_PKG, + NATIVEBUFFER_PIXEL_FMT_UYVY_422_PKG, + NATIVEBUFFER_PIXEL_FMT_YVYU_422_PKG, + NATIVEBUFFER_PIXEL_FMT_VYUY_422_PKG, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, + NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, + NATIVEBUFFER_PIXEL_FMT_RAW10, + NATIVEBUFFER_PIXEL_FMT_VENDER_MASK, + NATIVEBUFFER_PIXEL_FMT_BUTT +}; + +int32_t NativeBuffer_MetadataType[] = { + OH_VIDEO_HDR_HLG, + OH_VIDEO_HDR_HDR10, + OH_VIDEO_HDR_VIVID, + OH_VIDEO_HDR_NONE +}; + +std::unordered_map metadataString = { + {OH_VIDEO_HDR_HLG, "HDR_HLG"}, + {OH_VIDEO_HDR_HDR10, "HDR_HDR10"}, + {OH_VIDEO_HDR_VIVID, "HDR_VIVID"}, + {OH_VIDEO_HDR_NONE, "NONE"} +}; + +std::unordered_map formatString = { + {NATIVEBUFFER_PIXEL_FMT_CLUT8, "CLUT8"}, + {NATIVEBUFFER_PIXEL_FMT_CLUT1, "CLUT1"}, + {NATIVEBUFFER_PIXEL_FMT_CLUT4, "CLUT4"}, + {NATIVEBUFFER_PIXEL_FMT_RGB_565, "RGB_565"}, + {NATIVEBUFFER_PIXEL_FMT_RGBA_5658, "RGBA_5658"}, + {NATIVEBUFFER_PIXEL_FMT_RGBX_4444, "RGBX_4444"}, + {NATIVEBUFFER_PIXEL_FMT_RGBA_4444, "RGBA_4444"}, + {NATIVEBUFFER_PIXEL_FMT_RGB_444, "RGB_444"}, + {NATIVEBUFFER_PIXEL_FMT_RGBX_5551, "RGBX_5551"}, + {NATIVEBUFFER_PIXEL_FMT_RGBA_5551, "RGBA_5551"}, + {NATIVEBUFFER_PIXEL_FMT_RGB_555, "RGB_555"}, + {NATIVEBUFFER_PIXEL_FMT_RGBX_8888, "RGBX_8888"}, + 
{NATIVEBUFFER_PIXEL_FMT_RGBA_8888, "RGBA_8888"}, + {NATIVEBUFFER_PIXEL_FMT_RGB_888, "RGB_888"}, + {NATIVEBUFFER_PIXEL_FMT_BGR_565, "BGR_565"}, + {NATIVEBUFFER_PIXEL_FMT_BGRX_4444, "BGRX_4444"}, + {NATIVEBUFFER_PIXEL_FMT_BGRA_4444, "BGRA_4444"}, + {NATIVEBUFFER_PIXEL_FMT_BGRX_5551, "BGRX_5551"}, + {NATIVEBUFFER_PIXEL_FMT_BGRA_5551, "BGRA_5551"}, + {NATIVEBUFFER_PIXEL_FMT_BGRX_8888, "BGRX_8888"}, + {NATIVEBUFFER_PIXEL_FMT_BGRA_8888, "BGRA_8888"}, + {NATIVEBUFFER_PIXEL_FMT_YUV_422_I, "YUV_422_I"}, + {NATIVEBUFFER_PIXEL_FMT_YCBCR_422_SP, "YCBCR_422_SP"}, + {NATIVEBUFFER_PIXEL_FMT_YCRCB_422_SP, "YCRCB_422_SP"}, + {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, "YCBCR_420_SP"}, + {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, "YCRCB_420_SP"}, + {NATIVEBUFFER_PIXEL_FMT_YCBCR_422_P, "YCBCR_422_P"}, + {NATIVEBUFFER_PIXEL_FMT_YCRCB_422_P, "YCRCB_422_P"}, + {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_P, "YCBCR_420_P"}, + {NATIVEBUFFER_PIXEL_FMT_YCRCB_420_P, "YCRCB_420_P"}, + {NATIVEBUFFER_PIXEL_FMT_YUYV_422_PKG, "YUYV_422_PKG"}, + {NATIVEBUFFER_PIXEL_FMT_UYVY_422_PKG, "UYVY_422_PKG"}, + {NATIVEBUFFER_PIXEL_FMT_YVYU_422_PKG, "YVYU_422_PKG"}, + {NATIVEBUFFER_PIXEL_FMT_VYUY_422_PKG, "VYUY_422_PKG"}, + {NATIVEBUFFER_PIXEL_FMT_RGBA_1010102, "RGBA_1010102"}, + {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, "YCBCR_P010"}, + {NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, "YCRCB_P010"}, + {NATIVEBUFFER_PIXEL_FMT_RAW10, "RAW10"}, + {NATIVEBUFFER_PIXEL_FMT_VENDER_MASK, "VENDER_MASK"}, + {NATIVEBUFFER_PIXEL_FMT_BUTT, "BUTT"} +}; + +std::unordered_map colorString = { + {OH_COLORSPACE_NONE, "NONE"}, + {OH_COLORSPACE_BT601_EBU_FULL, "BT601_EBU_FULL"}, + {OH_COLORSPACE_BT601_SMPTE_C_FULL, "BT601_SMPTC_FULL"}, + {OH_COLORSPACE_BT709_FULL, "BT709_FULL"}, + {OH_COLORSPACE_BT2020_HLG_FULL, "BT2020_HLG_FULL"}, + {OH_COLORSPACE_BT2020_PQ_FULL, "BT2020_PQ_FULL"}, + {OH_COLORSPACE_BT601_EBU_LIMIT, "BT601_EBU_LIMIT"}, + {OH_COLORSPACE_BT601_SMPTE_C_LIMIT, "BT601_SMPTC_LIMIT"}, + {OH_COLORSPACE_BT709_LIMIT, "BT709_LIMIT"}, + {OH_COLORSPACE_BT2020_HLG_LIMIT, "BT2020_HLG_LIMIT"}, + {OH_COLORSPACE_BT2020_PQ_LIMIT, "BT2020_PQ_LIMIT"}, + {OH_COLORSPACE_SRGB_FULL, "SRGB_FULL"}, + {OH_COLORSPACE_P3_FULL, "P3_FULL"}, + {OH_COLORSPACE_P3_HLG_FULL, "P3_HLG_FULL"}, + {OH_COLORSPACE_P3_PQ_FULL, "P3_PQ_FULL"}, + {OH_COLORSPACE_ADOBERGB_FULL, "ADOBERGB_FULL"}, + {OH_COLORSPACE_SRGB_LIMIT, "SRGB_LIMIT"}, + {OH_COLORSPACE_P3_LIMIT, "P3_LIMIT"}, + {OH_COLORSPACE_P3_HLG_LIMIT, "P3_HLG_LIMIT"}, + {OH_COLORSPACE_P3_PQ_LIMIT, "P3_PQ_LIMIT"}, + {OH_COLORSPACE_ADOBERGB_LIMIT, "ADOBERGB_LIMIT"}, + {OH_COLORSPACE_LINEAR_SRGB, "LINEAR_SRGB"}, + {OH_COLORSPACE_LINEAR_BT709, "LINEAR_BT709"}, + {OH_COLORSPACE_LINEAR_P3, "LINEAR_P3"}, + {OH_COLORSPACE_LINEAR_BT2020, "LINEAR_BT2020"}, + {OH_COLORSPACE_DISPLAY_SRGB, "DISPLAY_SRGB"}, + {OH_COLORSPACE_DISPLAY_P3_SRGB, "DISPLAY_P3_SRGB"}, + {OH_COLORSPACE_DISPLAY_P3_HLG, "DISPLAY_P3_HLG"}, + {OH_COLORSPACE_DISPLAY_P3_PQ, "DISPLAY_P3_PQ"}, + {OH_COLORSPACE_DISPLAY_BT2020_SRGB, "DISPLAY_BT2020_SRGB"}, + {OH_COLORSPACE_DISPLAY_BT2020_HLG, "DISPLAY_BT2020_HLG"}, + {OH_COLORSPACE_DISPLAY_BT2020_PQ, "DISPLAY_BT2020_PQ"} +}; + +#endif \ No newline at end of file diff --git a/test/ndk/moduletest/video/BUILD.gn b/test/ndk/moduletest/video/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..0ecdc806af717a6f127af1c2c64c22be74f423b5 --- /dev/null +++ b/test/ndk/moduletest/video/BUILD.gn @@ -0,0 +1,82 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//foundation/multimedia/media_foundation/config.gni") +import("//foundation/multimedia/media_foundation/video_processing_engine/config.gni") + +video_moduletest_native_include_dirs = [ + "$vpe_interface_capi_dir", + "$vpe_capi_root_dir/test/moduletest/common", + "$vpe_capi_root_dir/../../../graphic/graphic_2d/interfaces/inner_api", + "$vpe_capi_root_dir/../../../window/window_manager/interfaces/innerkits", + "$vpe_capi_root_dir/../../av_codec/interfaces/kits/c", + "$vpe_capi_root_dir/../interface/kits/c", +] + +video_moduletest_cflags = [ + "-std=c++17", + "-fno-rtti", + "-fno-exceptions", + "-Wall", + "-fno-common", + "-fstack-protector-strong", + "-Wshadow", + "-FPIC", + "-FS", + "-O2", + "-D_FORTIFY_SOURCE=2", + "-fvisibility=hidden", + "-Wformat=2", + "-Wdate-time", + "-Werror", + "-Wextra", + "-Wimplicit-fallthrough", + "-Wsign-compare", + "-Wunused-parameter", +] + +################################################################################################################## +ohos_unittest("vpe_video_native_module_test") { + module_out_path = "media_foundation/moduletest" + include_dirs = video_moduletest_native_include_dirs + include_dirs += [ "./" ] + cflags = video_moduletest_cflags + + sources = [ + "api_test.cpp", + "capability_test.cpp", + "func_test.cpp", + "reli_test.cpp", + "video_sample.cpp", + ] + + deps = [ + "$vpe_capi_root_dir/framework:video_processing", + "$vpe_capi_root_dir/../../av_codec/interfaces/inner_api/native:av_codec_client", + "$vpe_capi_root_dir/../../av_codec/interfaces/kits/c:capi_packages", + "$vpe_capi_root_dir/../../av_codec/services/services:av_codec_service", + ] + + external_deps = [ + "c_utils:utils", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "graphic_surface:surface", + "hilog:libhilog", + "ipc:ipc_core", + "media_foundation:media_foundation", + "media_foundation:native_media_core", + "window_manager:libwm", + ] +} diff --git a/test/ndk/moduletest/video/api_test.cpp b/test/ndk/moduletest/video/api_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2f13eb69fa22e5593149c0f679dc76abcf52f874 --- /dev/null +++ b/test/ndk/moduletest/video/api_test.cpp @@ -0,0 +1,1033 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include +#include +#include "gtest/gtest.h" +#include "video_processing.h" +#include "surface/native_buffer.h" +#include "iconsumer_surface.h" +#include "surface/window.h" +#include "nocopyable.h" +using namespace std; +using namespace OHOS; +using namespace testing::ext; +namespace { +class VpeVideoApiTest : public testing::Test { +public: + // SetUpTestCase: Called before all test cases + static void SetUpTestCase(void); + // TearDownTestCase: Called after all test case + static void TearDownTestCase(void); + // SetUp: Called before each test cases + void SetUp(void); + // TearDown: Called after each test cases + void TearDown(void); +}; + +class TestConsumerListener : public IBufferConsumerListener { +public: + TestConsumerListener(sptr cs, std::string_view name); + ~TestConsumerListener(); + void OnBufferAvailable() override; +}; + +void VpeVideoApiTest::SetUpTestCase() {} +void VpeVideoApiTest::TearDownTestCase() {} +void VpeVideoApiTest::SetUp() {} +void VpeVideoApiTest::TearDown() +{ + OH_VideoProcessing_DeinitializeEnvironment(); +} + +TestConsumerListener::TestConsumerListener(sptr cs, std::string_view name) {} + +TestConsumerListener::~TestConsumerListener() {} + +void TestConsumerListener::OnBufferAvailable() {} + +const VideoProcessing_ColorSpaceInfo SRC_INFO = {OH_VIDEO_HDR_VIVID, + OH_COLORSPACE_BT2020_HLG_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010}; +const VideoProcessing_ColorSpaceInfo DST_INFO = {OH_VIDEO_HDR_VIVID, + OH_COLORSPACE_BT2020_PQ_LIMIT, + NATIVEBUFFER_PIXEL_FMT_YCBCR_P010}; + +} + +namespace { +static int32_t g_userValue = 1; +static int32_t g_Index = 1; + +static void onErrorEmptyCallback(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, + void* userData) +{ + //do nothing +} + +static void onErrorCallback(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, + void* userData) +{ + cout << "onErrorCallback" << endl; +} + +static void onStateEmptyCallback(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, + void* userData) +{ + //do nothing +} + +static void onStateCallback(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, + void* userData) +{ + cout << "onStateCallback" << endl; +} + + +static void OnNewOutputBufferEmptyCallback(OH_VideoProcessing* videoProcessor, uint32_t index, + void* userData) +{ + //do nothing +} + +static void OnNewOutputBufferCallback(OH_VideoProcessing* videoProcessor, uint32_t index, + void* userData) +{ + cout << "OnNewOutputBufferCallback" << endl; +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0010 + * @tc.name : first call OH_VideoProcessing_InitializeEnvironment + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0010, TestSize.Level0) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0020 + * @tc.name : first call OH_VideoProcessing_DeinitializeEnvironment + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0020, TestSize.Level0) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_DeinitializeEnvironment(); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0030 + * @tc.name : call OH_VideoProcessing_DeinitializeEnvironment after initialize + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0030, 
TestSize.Level0) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0040 + * @tc.name : call OH_VideoProcessing_IsColorSpaceConversionSupported with nullptr,nullptr + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0040, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(nullptr, nullptr); + ASSERT_FALSE(ret); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0050 + * @tc.name : call OH_VideoProcessing_IsColorSpaceConversionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0050, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(&SRC_INFO, nullptr); + ASSERT_FALSE(ret); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0060 + * @tc.name : call OH_VideoProcessing_IsColorSpaceConversionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0060, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(nullptr, &DST_INFO); + ASSERT_FALSE(ret); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0070 + * @tc.name : call OH_VideoProcessing_IsColorSpaceConversionSupported + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0070, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + bool ret = OH_VideoProcessing_IsColorSpaceConversionSupported(&SRC_INFO, &DST_INFO); + if (!access("/system/lib64/", 0)) { + ASSERT_TRUE(ret); + } else { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0080 + * @tc.name : call OH_VideoProcessing_IsMetadataGenerationSupported + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0080, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + bool ret = OH_VideoProcessing_IsMetadataGenerationSupported(nullptr); + ASSERT_FALSE(ret); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0090 + * @tc.name : call OH_VideoProcessing_IsMetadataGenerationSupported + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0090, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + bool ret = OH_VideoProcessing_IsMetadataGenerationSupported(&SRC_INFO); + if (!access("/system/lib64/", 0)) { + ASSERT_TRUE(ret); + } else { + ASSERT_FALSE(ret); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0100 + * @tc.name : call OH_VideoProcessing_Create + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0100, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing** videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(videoProcessor, INT_MAX); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0110 + * @tc.name : call OH_VideoProcessing_Create + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0110, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing** videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(videoProcessor, + 
VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0120 + * @tc.name : call OH_VideoProcessing_Create + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0120, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, INT_MAX); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + OH_VideoProcessing_Destroy(videoProcessor); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0130 + * @tc.name : call OH_VideoProcessing_Create + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0130, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0140 + * @tc.name : call OH_VideoProcessing_Destroy + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0140, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Destroy(nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0150 + * @tc.name : call OH_VideoProcessing_Destroy + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0150, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(videoProcessor); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0160 + * @tc.name : call OH_VideoProcessing_RegisterCallback + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0160, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_RegisterCallback(nullptr, nullptr, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0170 + * @tc.name : call OH_VideoProcessing_RegisterCallback + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0170, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_RegisterCallback(nullptr, callback, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + OH_VideoProcessingCallback_Destroy(callback); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0180 + * @tc.name : call OH_VideoProcessing_RegisterCallback + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0180, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* 
videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, nullptr, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0190 + * @tc.name : call OH_VideoProcessing_RegisterCallback + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0190, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError(callback, onErrorEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(videoProcessor); + OH_VideoProcessingCallback_Destroy(callback); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0200 + * @tc.name : call OH_VideoProcessing_RegisterCallback + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0200, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_VALUE); + OH_VideoProcessing_Destroy(videoProcessor); + OH_VideoProcessingCallback_Destroy(callback); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0210 + * @tc.name : call OH_VideoProcessing_RegisterCallback + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0210, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + void* userData = &g_userValue; + if (!userData) { + ret = OH_VideoProcessing_RegisterCallback(videoProcessor, callback, userData); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } + OH_VideoProcessing_Destroy(videoProcessor); + OH_VideoProcessingCallback_Destroy(callback); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0220 + * @tc.name : call 
OH_VideoProcessing_SetSurface + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0220, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_SetSurface(nullptr, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0230 + * @tc.name : call OH_VideoProcessing_SetSurface + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0230, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_SetSurface(nullptr, window); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + OH_NativeWindow_DestroyNativeWindow(window); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0240 + * @tc.name : call OH_VideoProcessing_SetSurface + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0240, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_SetSurface(videoProcessor, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0250 + * @tc.name : call OH_VideoProcessing_SetSurface + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0250, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0260 + * @tc.name : call OH_VideoProcessing_GetSurface + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0260, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_GetSurface(nullptr, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0270 + * @tc.name : call OH_VideoProcessing_GetSurface + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0270, TestSize.Level0) +{ 
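+    // A valid producer/consumer surface pair and OHNativeWindow are prepared below, but the
+    // processor argument stays null, so OH_VideoProcessing_GetSurface is expected to return
+    // VIDEO_PROCESSING_ERROR_INVALID_INSTANCE.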
+ OH_VideoProcessing_InitializeEnvironment(); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_GetSurface(nullptr, &window); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); + OH_NativeWindow_DestroyNativeWindow(window); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0280 + * @tc.name : call OH_VideoProcessing_GetSurface + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0280, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_GetSurface(videoProcessor, nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0290 + * @tc.name : call OH_VideoProcessing_GetSurface + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0290, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_GetSurface(videoProcessor, &window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0300 + * @tc.name : call OH_VideoProcessing_Start + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0300, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Start(nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0310 + * @tc.name : call OH_VideoProcessing_Start + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0310, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Start(videoProcessor); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : 
VPE_VIDEO_API_TEST_0320 + * @tc.name : call OH_VideoProcessing_Start + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0320, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Start(videoProcessor); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0330 + * @tc.name : call OH_VideoProcessing_Stop + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0330, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Stop(nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0340 + * @tc.name : call OH_VideoProcessing_Stop + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0340, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Stop(videoProcessor); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0350 + * @tc.name : call OH_VideoProcessing_Stop + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0350, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + sptr cs = Surface::CreateSurfaceAsConsumer(); + sptr listener = new TestConsumerListener( + cs, "/data/test/media/out_320_240_10s.rgba"); + cs->RegisterConsumerListener(listener); + auto p = cs->GetProducer(); + sptr ps = Surface::CreateSurfaceAsProducer(p); + OHNativeWindow *window = nullptr; + window = CreateNativeWindowFromSurface(&ps); + ret = OH_VideoProcessing_SetSurface(videoProcessor, window); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OHNativeWindow *outwindow = nullptr; + ret = OH_VideoProcessing_GetSurface(videoProcessor, &outwindow); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Start(videoProcessor); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + ret = OH_VideoProcessing_Stop(videoProcessor); + 
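+    // Stop is expected to fail with OPERATION_NOT_PERMITTED as well: Start was rejected above,
+    // so the processor never reached the running state (reading based on the asserted codes).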
ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + OH_VideoProcessing_Destroy(videoProcessor); + OH_NativeWindow_DestroyNativeWindow(window); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0360 + * @tc.name : call OH_VideoProcessing_RenderOutputBuffer + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0360, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(nullptr, g_Index); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_INSTANCE); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0370 + * @tc.name : call OH_VideoProcessing_RenderOutputBuffer + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0370, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_METADATA_GENERATION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, INT_MAX); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0380 + * @tc.name : call OH_VideoProcessingCallback_Create + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0380, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0390 + * @tc.name : call OH_VideoProcessingCallback_Create + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0390, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_Destroy(callback); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0400 + * @tc.name : call OH_VideoProcessingCallback_Destroy + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0400, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Destroy(nullptr); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0410 + * @tc.name : call OH_VideoProcessingCallback_Destroy + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0410, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_Destroy(callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0420 + * @tc.name : call OH_VideoProcessingCallback_BindOnError + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0420, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnError( + nullptr, onErrorEmptyCallback); + 
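+    // Binding onto a null VideoProcessing_Callback object is rejected as an invalid parameter
+    // even though the supplied function pointer is valid.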
ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0430 + * @tc.name : call OH_VideoProcessingCallback_BindOnError + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0430, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError( + callback, onErrorEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_Destroy(callback); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0440 + * @tc.name : call OH_VideoProcessingCallback_BindOnError + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0440, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnError( + nullptr, onErrorCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0450 + * @tc.name : call OH_VideoProcessingCallback_BindOnError + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0450, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError(callback, onErrorCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_Destroy(callback); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0460 + * @tc.name : call OH_VideoProcessingCallback_BindOnState + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0460, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnState( + nullptr, onStateEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0470 + * @tc.name : call OH_VideoProcessingCallback_BindOnState + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0470, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, onStateEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_Destroy(callback); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0480 + * @tc.name : call OH_VideoProcessingCallback_BindOnState + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0480, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnState(nullptr, onStateCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0490 + * @tc.name : call OH_VideoProcessingCallback_BindOnState + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0490, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); + 
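+    // The callback container has to be created successfully before OnState can be bound below;
+    // binding OnState onto a null callback object is covered by the preceding test cases.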
ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, onStateCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_Destroy(callback); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0500 + * @tc.name : call OH_VideoProcessingCallback_BindOnNewOutputBuffer + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0500, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer( + nullptr, OnNewOutputBufferEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0510 + * @tc.name : call OH_VideoProcessingCallback_BindOnNewOutputBuffer + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0510, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferEmptyCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_Destroy(callback); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0520 + * @tc.name : call OH_VideoProcessingCallback_BindOnNewOutputBuffer + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0520, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer( + nullptr, OnNewOutputBufferCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_ERROR_INVALID_PARAMETER); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_0530 + * @tc.name : call OH_VideoProcessingCallback_BindOnNewOutputBuffer + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0530, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBufferCallback); + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_Destroy(callback); +} + +/** + * @tc.number : VPE_VIDEO_API_TEST_05400 + * @tc.name : call OH_VideoProcessing_Create + * @tc.desc : function test + */ +HWTEST_F(VpeVideoApiTest, VPE_VIDEO_API_TEST_0540, TestSize.Level0) +{ + OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* videoProcessor = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Create(&videoProcessor, + VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + if (!access("/system/lib64/", 0)) { + ASSERT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(videoProcessor); + } else { + ASSERT_NE(ret, VIDEO_PROCESSING_SUCCESS); + } +} +} \ No newline at end of file diff --git a/test/unittest/aihdr_enhancer/BUILD.gn b/test/unittest/aihdr_enhancer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..cc70cda5a329415b36e8ab268cb36a18f92397c3 --- /dev/null +++ b/test/unittest/aihdr_enhancer/BUILD.gn @@ -0,0 +1,55 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("aihdr_enhancer_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$INTERFACES_INNER_API_DIR", + "$ALGORITHM_DIR/aihdr_enhancer/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + ] + + sources = [ "aihdr_enhancer_unit_test.cpp" ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} diff --git a/test/unittest/aihdr_enhancer/aihdr_enhancer_unit_test.cpp b/test/unittest/aihdr_enhancer/aihdr_enhancer_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..bfe28f3a189004e2d3dc205ced940a5a2d07c7c2 --- /dev/null +++ b/test/unittest/aihdr_enhancer/aihdr_enhancer_unit_test.cpp @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+#include "algorithm_common.h"
+#include "algorithm_errors.h"
+#include "graphic_common_c.h"
+#include "aihdr_enhancer_fwk.h"
+
+using namespace std;
+using namespace testing::ext;
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+
+sptr<SurfaceBuffer> CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height)
+{
+    auto buffer = SurfaceBuffer::Create();
+    if (nullptr == buffer) {
+        printf("Create surface buffer failed\n");
+        return nullptr;
+    }
+    BufferRequestConfig inputCfg;
+    inputCfg.width = width;
+    inputCfg.height = height;
+    inputCfg.strideAlignment = width;
+    inputCfg.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA;
+    inputCfg.format = pixelFormat;
+    inputCfg.timeout = 0;
+    GSError err = buffer->Alloc(inputCfg);
+    if (GSERROR_OK != err) {
+        printf("Alloc surface buffer failed\n");
+        return nullptr;
+    }
+    return buffer;
+}
+
+class AihdrEnhancerUnitTest : public testing::Test {
+public:
+    static void SetUpTestCase(void);
+    static void TearDownTestCase(void);
+    void SetUp();
+    void TearDown();
+};
+
+void AihdrEnhancerUnitTest::SetUpTestCase(void)
+{
+    cout << "[SetUpTestCase]: " << endl;
+}
+
+void AihdrEnhancerUnitTest::TearDownTestCase(void)
+{
+    cout << "[TearDownTestCase]: " << endl;
+}
+
+void AihdrEnhancerUnitTest::SetUp(void)
+{
+    cout << "[SetUp]: SetUp!!!" << endl;
+}
+
+void AihdrEnhancerUnitTest::TearDown(void)
+{
+    cout << "[TearDown]: over!!!" << endl;
+}
+
+// aihdr enhancer create
+HWTEST_F(AihdrEnhancerUnitTest, aihdrenhancer_create_01, TestSize.Level1)
+{
+    auto aihdrEnh = AihdrEnhancer::Create();
+    EXPECT_NE(aihdrEnh, nullptr);
+}
+
+// aihdr enhancer create multiple times
+HWTEST_F(AihdrEnhancerUnitTest, aihdrenhancer_create_02, TestSize.Level1)
+{
+    auto aihdrEnh = AihdrEnhancer::Create();
+    aihdrEnh = AihdrEnhancer::Create();
+    aihdrEnh = AihdrEnhancer::Create();
+    aihdrEnh = AihdrEnhancer::Create();
+    aihdrEnh = AihdrEnhancer::Create();
+    EXPECT_NE(aihdrEnh, nullptr);
+}
+
+// set parameter
+HWTEST_F(AihdrEnhancerUnitTest, aihdrenhancer_setparameter_01, TestSize.Level1)
+{
+    auto aihdrEnh = AihdrEnhancer::Create();
+    int param = 1;
+    auto res = aihdrEnh->SetParameter(param);
+    EXPECT_EQ(res, VPE_ALGO_ERR_OK);
+}
+
+// get parameter
+HWTEST_F(AihdrEnhancerUnitTest, aihdrenhancer_getparameter_01, TestSize.Level1)
+{
+    auto aihdrEnh = AihdrEnhancer::Create();
+    int param = 1;
+    int param_temp = 0;
+    auto res_set = aihdrEnh->SetParameter(param);
+    auto res_get = aihdrEnh->GetParameter(param_temp);
+    EXPECT_EQ(res_set, VPE_ALGO_ERR_OK);
+    EXPECT_EQ(res_get, VPE_ALGO_ERR_OK);
+    EXPECT_EQ(param, param_temp);
+}
+
+// process
+HWTEST_F(AihdrEnhancerUnitTest, aihdrenhancer_process_01, TestSize.Level1)
+{
+    auto aihdrEnh = AihdrEnhancer::Create();
+    int param = 1;
+    aihdrEnh->SetParameter(param);
+    int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP;
+    sptr<SurfaceBuffer> input = CreateSurfaceBuffer(inputFormat, 1024, 768);
+    auto ret = aihdrEnh->Process(input);
+    EXPECT_EQ(ret, VPE_ALGO_ERR_OK);
+}
+
+// process
+HWTEST_F(AihdrEnhancerUnitTest, aihdrenhancer_process_02, TestSize.Level1)
+{
+    auto aihdrEnh = AihdrEnhancer::Create();
+    int param = 1;
+    aihdrEnh->SetParameter(param);
+    int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP;
+    sptr<SurfaceBuffer> input = CreateSurfaceBuffer(inputFormat, 1024, 768);
+    input = nullptr;
+    auto ret = aihdrEnh->Process(input);
+    EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_VAL);
+}
+} // namespace
VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/unittest/aihdr_enhancer_video/BUILD.gn b/test/unittest/aihdr_enhancer_video/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..23bd14ab872ac53987bb7cf1517581f054ccb96f --- /dev/null +++ b/test/unittest/aihdr_enhancer_video/BUILD.gn @@ -0,0 +1,60 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("aihdr_enhancer_video_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/DetailEnhancer/sample", + #"$ALGORITHM_DIR/detail_enhancer/include", + "$ALGORITHM_DIR/aihdr_enhancer/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "$ALGORITHM_DIR/aihdr_enhancer_video/include", + "$TEST_UTILS_PATH/DetailEnhancer/sample/video", + ] + + sources = [ "aihdr_enhancer_video_unit_test.cpp" ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "media_foundation:media_foundation", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} diff --git a/test/unittest/aihdr_enhancer_video/aihdr_enhancer_video_unit_test.cpp b/test/unittest/aihdr_enhancer_video/aihdr_enhancer_video_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3e6aa6f38fc8b693fec44cf2e83b2f55143eaf1b --- /dev/null +++ b/test/unittest/aihdr_enhancer_video/aihdr_enhancer_video_unit_test.cpp @@ -0,0 +1,404 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "algorithm_common.h" +#include "algorithm_errors.h" + +#include "aihdr_enhancer_video_impl.h" +#include "aihdr_enhancer_video.h" +#include "surface/window.h" +#include "external_window.h" + +using namespace std; +using namespace testing::ext; +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; +constexpr uint32_t DEFAULT_WIDTH = 1920; +constexpr uint32_t DEFAULT_HEIGHT = 1080; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +std::shared_ptr aev = nullptr; +class AihdrEnhancerVideoCallbackImpl : public AihdrEnhancerVideoCallback { +public: + AihdrEnhancerVideoCallbackImpl() = default; + ~AihdrEnhancerVideoCallbackImpl() override = default; + AihdrEnhancerVideoCallbackImpl(const AihdrEnhancerVideoCallbackImpl&) = delete; + AihdrEnhancerVideoCallbackImpl& operator=(const AihdrEnhancerVideoCallbackImpl&) = delete; + AihdrEnhancerVideoCallbackImpl(AihdrEnhancerVideoCallbackImpl&&) = delete; + AihdrEnhancerVideoCallbackImpl& operator=(AihdrEnhancerVideoCallbackImpl&&) = delete; + + void OnError(int32_t errorCode) override; + void OnState(int32_t state) override; + void OnOutputBufferAvailable(uint32_t index, AihdrEnhancerBufferFlag flag) override; +}; +void AihdrEnhancerVideoCallbackImpl::OnOutputBufferAvailable(uint32_t index, AihdrEnhancerBufferFlag flag) +{ + if (aev != nullptr) { + aev->ReleaseOutputBuffer(index, flag); + } +} +void AihdrEnhancerVideoCallbackImpl::OnError(int32_t errorCode) +{ + (void)errorCode; +} +void AihdrEnhancerVideoCallbackImpl::OnState(int32_t state) +{ + (void)state; +} + +class AihdrEnhancerVideoUnitTest : public testing::Test { +public: + static void SetUpTestCase(void); + static void TearDownTestCase(void); + void SetUp(); + void TearDown(); + sptr surface; + OHNativeWindow *nativeWindow; + uint32_t FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer); +}; + +void AihdrEnhancerVideoUnitTest::SetUpTestCase(void) +{ + cout << "[SetUpTestCase]: " << endl; +} + +void AihdrEnhancerVideoUnitTest::TearDownTestCase(void) +{ + cout << "[TearDownTestCase]: " << endl; +} + +void AihdrEnhancerVideoUnitTest::SetUp(void) +{ + cout << "[SetUp]: SetUp!!!" << endl; +} + +void AihdrEnhancerVideoUnitTest::TearDown(void) +{ + cout << "[TearDown]: over!!!" 
<< endl; +} + +int64_t GetSystemTime() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = reinterpret_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + + return nanoTime / NANOS_IN_MICRO; +} + +uint32_t AihdrEnhancerVideoUnitTest::FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer) +{ + struct Region region; + struct Region::Rect *rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = DEFAULT_WIDTH; + rect->h = DEFAULT_HEIGHT; + region.rects = rect; + NativeWindowHandleOpt(nativeWindow, SET_UI_TIMESTAMP, GetSystemTime()); + int32_t err = OH_NativeWindow_NativeWindowFlushBuffer(nativeWindow, ohNativeWindowBuffer, -1, region); + delete rect; + if (err != 0) { + cout << "FlushBuffer failed" << endl; + return 1; + } + return 0; +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_init_01, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + EXPECT_NE(aihdrEnhancerVideo, nullptr); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_02, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = nullptr; + auto ret = aihdrEnhancerVideo->SetCallback(cb); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_03, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + OHNativeWindow* window{}; + auto ret = aihdrEnhancerVideo->GetSurface(&window); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_04, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + OHNativeWindow* window{}; + auto ret = aihdrEnhancerVideo->GetSurface(&window); + ret = aihdrEnhancerVideo->GetSurface(&window); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_05, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + OHNativeWindow* window{}; + auto ret = aihdrEnhancerVideo->SetSurface(window); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_06, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + + auto ret = aihdrEnhancerVideo->ReleaseOutputBuffer(0, true); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_07, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + auto ret = aihdrEnhancerVideo->NotifyEos(); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_08, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = aihdrEnhancerVideo->SetCallback(cb); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_09, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + auto res = aihdrEnhancerVideo->Stop(); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_10, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = aihdrEnhancerVideo->SetCallback(cb); + aihdrEnhancerVideo->GetSurface(&nativeWindow); + + auto aihdrEnhancerVideo2 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window2{}; + aihdrEnhancerVideo2->GetSurface(&window2); + OH_NativeWindow_NativeWindowHandleOpt(window2, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, 
DEFAULT_HEIGHT); + res = aihdrEnhancerVideo->SetSurface(window2); + res = aihdrEnhancerVideo->Prepare(); + res = aihdrEnhancerVideo->Start(); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); + + int fenceFd = -1; + OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + auto ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_11, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = aihdrEnhancerVideo->SetCallback(cb); + aihdrEnhancerVideo->GetSurface(&nativeWindow); + + auto aihdrEnhancerVideo2 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window2{}; + aihdrEnhancerVideo2->GetSurface(&window2); + OH_NativeWindow_NativeWindowHandleOpt(window2, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + res = aihdrEnhancerVideo->SetSurface(window2); + res = aihdrEnhancerVideo->Prepare(); + res = aihdrEnhancerVideo->Start(); + res = aihdrEnhancerVideo->Stop(); + res = aihdrEnhancerVideo->Prepare(); + res = aihdrEnhancerVideo->Stop(); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_12, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = aihdrEnhancerVideo->SetCallback(cb); + aihdrEnhancerVideo->GetSurface(&nativeWindow); + + auto aihdrEnhancerVideo2 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window2{}; + aihdrEnhancerVideo2->GetSurface(&window2); + OH_NativeWindow_NativeWindowHandleOpt(window2, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + res = aihdrEnhancerVideo->SetSurface(window2); + res = aihdrEnhancerVideo->Prepare(); + res = aihdrEnhancerVideo->Start(); + res = aihdrEnhancerVideo->Stop(); + res = aihdrEnhancerVideo->Prepare(); + res = aihdrEnhancerVideo->Stop(); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_13, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = aihdrEnhancerVideo->SetCallback(cb); + aihdrEnhancerVideo->GetSurface(&nativeWindow); + + auto aihdrEnhancerVideo2 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window2{}; + aihdrEnhancerVideo2->GetSurface(&window2); + + auto aihdrEnhancerVideo3 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window3{}; + aihdrEnhancerVideo3->GetSurface(&window3); + + OH_NativeWindow_NativeWindowHandleOpt(window2, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + res = aihdrEnhancerVideo->SetSurface(window2); + res = aihdrEnhancerVideo->Prepare(); + res = aihdrEnhancerVideo->Start(); + res = aihdrEnhancerVideo->SetSurface(window3); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_14, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = aihdrEnhancerVideo->SetCallback(cb); + aihdrEnhancerVideo->GetSurface(&nativeWindow); + + auto aihdrEnhancerVideo2 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window2{}; + aihdrEnhancerVideo2->GetSurface(&window2); + 
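+    // The second enhancer instance exists only to provide a producer window (window2). After
+    // Start and Stop, re-attaching the same window through SetSurface is expected to be
+    // rejected, which is what the final EXPECT_NE verifies.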
OH_NativeWindow_NativeWindowHandleOpt(window2, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + res = aihdrEnhancerVideo->SetSurface(window2); + res = aihdrEnhancerVideo->Prepare(); + res = aihdrEnhancerVideo->Start(); + res = aihdrEnhancerVideo->Stop(); + res = aihdrEnhancerVideo->SetSurface(window2); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_15, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = aihdrEnhancerVideo->SetCallback(cb); + aihdrEnhancerVideo->GetSurface(&nativeWindow); + + auto aihdrEnhancerVideo2 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window2{}; + aihdrEnhancerVideo2->GetSurface(&window2); + + auto aihdrEnhancerVideo3 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window3{}; + aihdrEnhancerVideo3->GetSurface(&window3); + + OH_NativeWindow_NativeWindowHandleOpt(window2, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + res = aihdrEnhancerVideo->SetSurface(window2); + res = aihdrEnhancerVideo->Prepare(); + res = aihdrEnhancerVideo->Start(); + res = aihdrEnhancerVideo->Stop(); + res = aihdrEnhancerVideo->SetSurface(window3); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_16, TestSize.Level1) +{ + auto aihdrEnhancerVideo = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = aihdrEnhancerVideo->SetCallback(cb); + aihdrEnhancerVideo->GetSurface(&nativeWindow); + + auto aihdrEnhancerVideo2 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window2{}; + aihdrEnhancerVideo2->GetSurface(&window2); + + OH_NativeWindow_NativeWindowHandleOpt(window2, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + res = aihdrEnhancerVideo->SetSurface(window2); + res = aihdrEnhancerVideo->Prepare(); + res = aihdrEnhancerVideo->Start(); + res = aihdrEnhancerVideo->ReleaseOutputBuffer(0, true); + + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_17, TestSize.Level1) +{ + std::queue> AppInBufferAvilQue; + sptr buffer; + int32_t fence = -1; + BufferFlushConfig flushCfg_{}; + BufferRequestConfig requestCfg_{}; + requestCfg_.width = DEFAULT_WIDTH; + requestCfg_.height = DEFAULT_HEIGHT; + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = 32; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = DEFAULT_WIDTH; + flushCfg_.damage.h = DEFAULT_HEIGHT; + flushCfg_.timestamp = 0; + + aev = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = aev->SetCallback(cb); + OHNativeWindow* window1{}; + aev->GetSurface(&window1); + + auto aihdrEnhancerVideo2 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window2{}; + aihdrEnhancerVideo2->GetSurface(&window2); + + auto aihdrEnhancerVideo3 = AihdrEnhancerVideo::Create(); + OHNativeWindow* window3{}; + aihdrEnhancerVideo3->GetSurface(&window3); + + OH_NativeWindow_NativeWindowHandleOpt(window2, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + res = aev->SetSurface(window2); + res = aev->Prepare(); + res = aev->Start(); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); + for (int i = 0; i < 3; i++) { + OHNativeWindow* windowTmp = (i % 2) ? 
window2 : window3; + window1->surface->RequestBuffer(buffer, fence, requestCfg_); + AppInBufferAvilQue.push(buffer); + window1->surface->FlushBuffer(buffer, -1, flushCfg_); + aev->SetSurface(windowTmp); + aev->NotifyEos(); + sleep(2); + } + aihdrEnhancerVideo2->Release(); + aihdrEnhancerVideo3->Release(); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/unittest/colorSpace_converter_video_ndk/BUILD.gn b/test/unittest/colorSpace_converter_video_ndk/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..142740bf29833244edd24e3f0356257e127a88a7 --- /dev/null +++ b/test/unittest/colorSpace_converter_video_ndk/BUILD.gn @@ -0,0 +1,77 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("colorSpace_converter_video_ndk_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/ColorSpaceConverter/sample", + "$ALGORITHM_DIR/colorspace_converter/include", + "$FRAMEWORK_DIR/capi/video_processing/include", + "$FRAMEWORK_DIR/capi/video_processing/colorspace_converter/include/", + "$ALGORITHM_DIR/colorspace_converter_video/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "$TEST_UTILS_PATH/ColorSpaceConverter/sample/video", + "//foundation/multimedia/media_foundation/interface/kits/c/", + "//foundation/multimedia/media_foundation/video_processing_engine/interface/kits/c/", + "//foundation/multimedia/media_foundation/video_processing_engine/", + "//foundation/multimedia/media_foundation/video_processing_engine/framework/dfx/include/", + "//foundation/multimedia/media_foundation/video_processing_engine/framework/capi/video_processing/include/", + "//foundation/multimedia/media_foundation/video_processing_engine/interface/inner_api/", + "//foundation/multimedia/video_processing_engine/interfaces/inner_api/", + "//foundation/multimedia/video_processing_engine/framework/algorithm/colorspace_converter_video/include/", + "//foundation/graphic/graphic_2d/interfaces/inner_api/", + "//foundation/graphic/graphic_2d_ext/ohcore/graphic_compat_layer/include/utils/", + ] + + sources = [ "colorSpace_converter_video_ndk_unit_test.cpp" ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$FRAMEWORK_DIR:video_processing_capi_impl", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "graphic_surface:sync_fence", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "hilog:libhilog", + "hitrace:hitrace_meter", + 
"media_foundation:native_media_core", + "media_foundation:video_processing", + "ipc:ipc_core", + "memory_utils:libdmabufheap", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} diff --git a/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp b/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ed67092d61d059175459bdb5f8fe825ea80ff090 --- /dev/null +++ b/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp @@ -0,0 +1,570 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include "securec.h" +#include + +#include "native_avformat.h" +#include "native_window.h" +#include "surface/window.h" +#include "external_window.h" +#include "v1_0/cm_color_space.h" + +#include "video_processing.h" +#include "video_processing_types.h" +#include "colorspace_converter_video.h" +#include "colorspace_converter_video_description.h" +#include "algorithm_common.h" +#include "algorithm_common.h" + +constexpr uint32_t DEFAULT_WIDTH = 1920; +constexpr uint32_t DEFAULT_HEIGHT = 1080; +constexpr uint32_t DEFAULT_BYTE = 32; + +using namespace testing::ext; +using namespace OHOS; +using namespace OHOS::Media; +using namespace std; +using namespace OHOS::Media::VideoProcessingEngine; +using namespace OHOS::HDI::Display::Graphic::Common::V1_0; +namespace { + +class ColorSpaceConverterVideoNdkImplUnitTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp() + { + requestCfg_.usage = + BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER | BUFFER_USAGE_HW_TEXTURE; + requestCfg_.width = DEFAULT_WIDTH; + requestCfg_.height = DEFAULT_HEIGHT; + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = DEFAULT_BYTE; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = DEFAULT_WIDTH; + flushCfg_.damage.h = DEFAULT_HEIGHT; + flushCfg_.timestamp = 0; + }; + void TearDown() + { + }; + + BufferFlushConfig flushCfg_{}; + BufferRequestConfig requestCfg_{}; + int32_t fence_ = -1; + CM_ColorSpaceType inColspc_ = CM_BT2020_PQ_LIMIT; + CM_HDR_Metadata_Type inMetaType_ = CM_VIDEO_HDR_VIVID; + GraphicPixelFormat inSurfacePixelFmt_ = GRAPHIC_PIXEL_FMT_YCBCR_P010; + OH_NativeBuffer_ColorSpace outColspcInfo_ = OH_COLORSPACE_BT709_LIMIT; + OH_NativeBuffer_MetadataType outMetaType_ = OH_VIDEO_HDR_VIVID; + OH_NativeBuffer_Format outSurfacePixelFmt_ = NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP; + + void SetMeatadata(sptr &buffer, uint32_t value); + void SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo); + VideoProcessing_ErrorCode ProcessToHDR(); + VideoProcessing_ErrorCode ProcessToSDR(); +}; + +void OnError(OH_VideoProcessing *handle, VideoProcessing_ErrorCode errorCode, void* userData) +{ + (void)handle; + 
(void)errorCode;
+    (void)userData;
+}
+void OnState(OH_VideoProcessing *handle, VideoProcessing_State state, void* userData)
+{
+    (void)handle;
+    (void)state;
+    (void)userData;
+}
+void OnNewOutputBuffer(OH_VideoProcessing *handle, uint32_t index, void* userData)
+{
+    (void)handle;
+    (void)index;
+    (void)userData;
+}
+
+void ColorSpaceConverterVideoNdkImplUnitTest::SetMeatadata(sptr<SurfaceBuffer> &buffer, uint32_t value)
+{
+    std::vector<uint8_t> metadata;
+    metadata.resize(sizeof(value));
+    (void)memcpy_s(metadata.data(), metadata.size(), &value, sizeof(value));
+    uint32_t err = buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, metadata);
+    if (err != 0) {
+        printf("Buffer set metadata info, ret: %d\n", err);
+    }
+}
+
+void ColorSpaceConverterVideoNdkImplUnitTest::SetMeatadata(sptr<SurfaceBuffer> &buffer,
+    CM_ColorSpaceInfo &colorspaceInfo)
+{
+    std::vector<uint8_t> metadata;
+    metadata.resize(sizeof(CM_ColorSpaceInfo));
+    (void)memcpy_s(metadata.data(), metadata.size(), &colorspaceInfo, sizeof(CM_ColorSpaceInfo));
+    uint32_t err = buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, metadata);
+    if (err != 0) {
+        printf("Buffer set colorspace info, ret: %d\n", err);
+    }
+}
+
+VideoProcessing_ErrorCode ColorSpaceConverterVideoNdkImplUnitTest::ProcessToSDR()
+{
+    VideoProcessing_ErrorCode ret = VIDEO_PROCESSING_SUCCESS;
+    sptr<SurfaceBuffer> buffer;
+    CM_ColorSpaceInfo inColspcInfo = {
+        static_cast<CM_ColorPrimaries>((inColspc_ & COLORPRIMARIES_MASK) >> COLORPRIMARIES_OFFSET),
+        static_cast<CM_TransFunc>((inColspc_ & TRANSFUNC_MASK) >> TRANSFUNC_OFFSET),
+        static_cast<CM_Matrix>((inColspc_ & MATRIX_MASK) >> MATRIX_OFFSET),
+        static_cast<CM_Range>((inColspc_ & RANGE_MASK) >> RANGE_OFFSET)
+    };
+    ret = OH_VideoProcessing_InitializeEnvironment();
+    int createType = 0x1;
+    OH_VideoProcessing* instance = nullptr;
+    OH_VideoProcessing* instance2 = nullptr;
+    ret = OH_VideoProcessing_Create(&instance, createType);
+    ret = OH_VideoProcessing_Create(&instance2, createType);
+    VideoProcessing_Callback* callback = nullptr;
+    ret = OH_VideoProcessingCallback_Create(&callback);
+    ret = OH_VideoProcessingCallback_BindOnError(callback, OnError);
+    ret = OH_VideoProcessingCallback_BindOnState(callback, OnState);
+    ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer);
+    OHNativeWindow* window = nullptr;
+    OHNativeWindow* window2 = nullptr;
+    ret = OH_VideoProcessing_GetSurface(instance, &window);
+    ret = OH_VideoProcessing_GetSurface(instance2, &window2);
+    int32_t err1 = OH_NativeWindow_NativeWindowHandleOpt(window2, SET_FORMAT, outSurfacePixelFmt_);
+    err1 = OH_NativeWindow_SetColorSpace(window2, outColspcInfo_);
+    auto userData = VIDEO_PROCESSING_STATE_STOPPED;
+    ret = OH_VideoProcessing_RegisterCallback(instance, callback, &userData);
+    ret = OH_VideoProcessing_SetSurface(instance, window2);
+    ret = OH_VideoProcessing_Start(instance);
+    if (ret != VIDEO_PROCESSING_SUCCESS) {
+        return ret;
+    }
+    int videoSurfaceBuffNum = 1;
+    int sleepTime = 2;
+    for (int i = 0; i < videoSurfaceBuffNum; i++) {
+        window->surface->RequestBuffer(buffer, fence_, requestCfg_);
+        SetMeatadata(buffer, inColspcInfo);
+        SetMeatadata(buffer, (uint32_t)inMetaType_);
+        window->surface->FlushBuffer(buffer, fence_, flushCfg_);
+        OH_VideoProcessing_SetSurface(instance, window2);
+        sleep(sleepTime);
+    }
+    OH_VideoProcessing_Destroy(instance);
+    OH_VideoProcessing_Destroy(instance2);
+    OH_VideoProcessing_DeinitializeEnvironment();
+    return ret;
+}
+
+VideoProcessing_ErrorCode ColorSpaceConverterVideoNdkImplUnitTest::ProcessToHDR()
+{
+    VideoProcessing_ErrorCode ret = VIDEO_PROCESSING_SUCCESS;
+    sptr<SurfaceBuffer> buffer;
+    CM_ColorSpaceInfo
inColspcInfo = { + static_cast((inColspc_ & COLORPRIMARIES_MASK) >> COLORPRIMARIES_OFFSET), + static_cast((inColspc_ & TRANSFUNC_MASK) >> TRANSFUNC_OFFSET), + static_cast((inColspc_ & MATRIX_MASK) >> MATRIX_OFFSET), + static_cast((inColspc_ & RANGE_MASK) >> RANGE_OFFSET) + }; + ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x1; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + ret = OH_VideoProcessing_Create(&instance2, createType); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + ret = OH_VideoProcessingCallback_BindOnError(callback, OnError); + ret = OH_VideoProcessingCallback_BindOnState(callback, OnState); + ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + ret = OH_VideoProcessing_GetSurface(instance, &window); + ret = OH_VideoProcessing_GetSurface(instance2, &window2); + int32_t err1 = OH_NativeWindow_NativeWindowHandleOpt(window2, SET_FORMAT, outSurfacePixelFmt_); + err1 = OH_NativeWindow_SetColorSpace(window2, outColspcInfo_); + uint8_t val = static_cast(outMetaType_); + err1 = OH_NativeWindow_SetMetadataValue(window2, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + ret = OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + ret = OH_VideoProcessing_SetSurface(instance, window2); + ret = OH_VideoProcessing_Start(instance); + if (ret != VIDEO_PROCESSING_SUCCESS) { + return ret; + } + int videoSurfaceBuffNum = 1; + int sleepTime = 2; + for (int i = 0; i < videoSurfaceBuffNum; i++) { + window->surface->RequestBuffer(buffer, fence_, requestCfg_); + SetMeatadata(buffer, inColspcInfo); + SetMeatadata(buffer, (uint32_t)inMetaType_); + window->surface->FlushBuffer(buffer, fence_, flushCfg_); + OH_VideoProcessing_SetSurface(instance, window2); + sleep(sleepTime); + } + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_Destroy(instance2); + OH_VideoProcessing_DeinitializeEnvironment(); + return ret; +} + +HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_testFunSupportedSDR2SDR, TestSize.Level1) +{ + bool resultSupported = false; + resultSupported = OH_VideoProcessing_IsColorSpaceConversionSupported(nullptr, nullptr); + EXPECT_EQ(resultSupported, false); + resultSupported = OH_VideoProcessing_IsMetadataGenerationSupported(nullptr); + EXPECT_EQ(resultSupported, false); + const int formatListNum = 3; + int formatListSDRNative[formatListNum] = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888}; + const int colorMetaSdr2SdrListNum = 2; + const int colorMetaParaNum = 4; + const int colorMetaParaInColorNum = 1; + const int colorMetaParaOutColorNum = 3; + int colorMetaListSdr2Sdr[colorMetaSdr2SdrListNum][colorMetaParaNum] = { + {CM_METADATA_NONE, OH_COLORSPACE_BT601_EBU_LIMIT, CM_METADATA_NONE, OH_COLORSPACE_BT709_LIMIT}, + {CM_METADATA_NONE, OH_COLORSPACE_BT601_SMPTE_C_LIMIT, CM_METADATA_NONE, OH_COLORSPACE_BT709_LIMIT} + }; + VideoProcessing_ColorSpaceInfo sourceVideoInfo = {-1, -1, -1}; + VideoProcessing_ColorSpaceInfo destinationVideoInfo = {-1, -1, -1}; + for (int n = 0; n < colorMetaSdr2SdrListNum; n++) { + sourceVideoInfo.colorSpace = static_cast(colorMetaListSdr2Sdr[n][colorMetaParaInColorNum]); + destinationVideoInfo.colorSpace = 
static_cast(colorMetaListSdr2Sdr[n][colorMetaParaOutColorNum]); + for (int i = 0; i < formatListNum; i++) { + sourceVideoInfo.pixelFormat = static_cast(formatListSDRNative[i]); + for (int j = 0; j < formatListNum; j++) { + destinationVideoInfo.pixelFormat = static_cast(formatListSDRNative[j]); + resultSupported = OH_VideoProcessing_IsColorSpaceConversionSupported( + &sourceVideoInfo, &destinationVideoInfo); + EXPECT_EQ(resultSupported, true); + } + } + } + sourceVideoInfo.metadataType = static_cast(OH_VIDEO_HDR_VIVID); + sourceVideoInfo.colorSpace = static_cast(OH_COLORSPACE_BT2020_HLG_LIMIT); + sourceVideoInfo.pixelFormat = static_cast(NATIVEBUFFER_PIXEL_FMT_YCBCR_P010); + destinationVideoInfo.metadataType = static_cast(OH_VIDEO_HDR_VIVID); + destinationVideoInfo.colorSpace = static_cast(OH_COLORSPACE_BT2020_HLG_LIMIT); + destinationVideoInfo.pixelFormat = static_cast(NATIVEBUFFER_PIXEL_FMT_YCBCR_P010); + resultSupported = OH_VideoProcessing_IsColorSpaceConversionSupported( + &sourceVideoInfo, &destinationVideoInfo); + EXPECT_EQ(resultSupported, false); +} +HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_testFunSupportedHDR2SDR, TestSize.Level1) +{ + bool resultSupported = false; + const int formatListNum = 3; + int formatListSDRNative[formatListNum] = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888}; + int formatListHDRNative[formatListNum] = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102}; + const int colorMetaHdr2SdrListNum = 2; + const int colorMetaParaNum = 4; + const int colorMetaParaInMetaNum = 0; + const int colorMetaParaInColorNum = 1; + const int colorMetaParaOutColorNum = 3; + int colorMetaListHdr2Sdr[colorMetaHdr2SdrListNum][colorMetaParaNum] = { + {OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_PQ_LIMIT, CM_METADATA_NONE, OH_COLORSPACE_BT709_LIMIT}, + {OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_HLG_LIMIT, CM_METADATA_NONE, OH_COLORSPACE_BT709_LIMIT} + }; + VideoProcessing_ColorSpaceInfo sourceVideoInfo = {-1, -1, -1}; + VideoProcessing_ColorSpaceInfo destinationVideoInfo = {-1, -1, -1}; + for (int n = 0; n < colorMetaHdr2SdrListNum; n++) { + sourceVideoInfo.metadataType = static_cast(colorMetaListHdr2Sdr[n][colorMetaParaInMetaNum]); + sourceVideoInfo.colorSpace = static_cast(colorMetaListHdr2Sdr[n][colorMetaParaInColorNum]); + destinationVideoInfo.colorSpace = static_cast(colorMetaListHdr2Sdr[n][colorMetaParaOutColorNum]); + for (int i = 0; i < formatListNum; i++) { + sourceVideoInfo.pixelFormat = static_cast(formatListHDRNative[i]); + for (int j = 0; j < formatListNum; j++) { + destinationVideoInfo.pixelFormat = static_cast(formatListSDRNative[j]); + resultSupported = OH_VideoProcessing_IsColorSpaceConversionSupported( + &sourceVideoInfo, &destinationVideoInfo); + EXPECT_EQ(resultSupported, true); + } + } + } + sourceVideoInfo.metadataType = static_cast(OH_VIDEO_HDR_VIVID); + sourceVideoInfo.colorSpace = static_cast(OH_COLORSPACE_BT2020_HLG_LIMIT); + sourceVideoInfo.pixelFormat = static_cast(NATIVEBUFFER_PIXEL_FMT_YCBCR_P010); + destinationVideoInfo.metadataType = static_cast(OH_VIDEO_HDR_VIVID); + destinationVideoInfo.colorSpace = static_cast(OH_COLORSPACE_BT2020_HLG_LIMIT); + destinationVideoInfo.pixelFormat = static_cast(NATIVEBUFFER_PIXEL_FMT_YCBCR_P010); + resultSupported = OH_VideoProcessing_IsColorSpaceConversionSupported( + &sourceVideoInfo, &destinationVideoInfo); + EXPECT_EQ(resultSupported, false); +} 
+HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_testFunSupportedHDR2HDR, TestSize.Level1) +{ + bool resultSupported = false; + const int formatListNum = 3; + int formatListHDRNative[formatListNum] = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102}; + const int colorMetaHdr2HdrListNum = 4; + const int colorMetaParaNum = 4; + const int colorMetaParaInMetaNum = 0; + const int colorMetaParaInColorNum = 1; + const int colorMetaParaOutMetaNum = 2; + const int colorMetaParaOutColorNum = 3; + int colorMetaListHdr2Hdr[colorMetaHdr2HdrListNum][colorMetaParaNum] = { + {OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_PQ_LIMIT, OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_HLG_LIMIT}, + {OH_VIDEO_HDR_HDR10, OH_COLORSPACE_BT2020_PQ_LIMIT, OH_VIDEO_HDR_HLG, OH_COLORSPACE_BT2020_HLG_LIMIT}, + {OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_PQ_LIMIT, OH_VIDEO_HDR_HLG, OH_COLORSPACE_BT2020_HLG_LIMIT}, + {OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_HLG_LIMIT, OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_PQ_LIMIT} + }; + VideoProcessing_ColorSpaceInfo sourceVideoInfo = {-1, -1, -1}; + VideoProcessing_ColorSpaceInfo destinationVideoInfo = {-1, -1, -1}; + for (int n = 0; n < colorMetaHdr2HdrListNum; n++) { + sourceVideoInfo.metadataType = static_cast(colorMetaListHdr2Hdr[n][colorMetaParaInMetaNum]); + sourceVideoInfo.colorSpace = static_cast(colorMetaListHdr2Hdr[n][colorMetaParaInColorNum]); + destinationVideoInfo.metadataType = static_cast(colorMetaListHdr2Hdr[n][colorMetaParaOutMetaNum]); + destinationVideoInfo.colorSpace = static_cast(colorMetaListHdr2Hdr[n][colorMetaParaOutColorNum]); + for (int i = 0; i < formatListNum; i++) { + sourceVideoInfo.pixelFormat = static_cast(formatListHDRNative[i]); + for (int j = 0; j < formatListNum; j++) { + destinationVideoInfo.pixelFormat = static_cast(formatListHDRNative[j]); + resultSupported = OH_VideoProcessing_IsColorSpaceConversionSupported( + &sourceVideoInfo, &destinationVideoInfo); + EXPECT_EQ(resultSupported, true); + } + } + } + sourceVideoInfo.metadataType = static_cast(OH_VIDEO_HDR_VIVID); + sourceVideoInfo.colorSpace = static_cast(OH_COLORSPACE_BT709_FULL); + sourceVideoInfo.pixelFormat = static_cast(NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP); + destinationVideoInfo.metadataType = static_cast(OH_VIDEO_HDR_VIVID); + destinationVideoInfo.colorSpace = static_cast(OH_COLORSPACE_BT709_FULL); + destinationVideoInfo.pixelFormat = static_cast(NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP); + resultSupported = OH_VideoProcessing_IsColorSpaceConversionSupported( + &sourceVideoInfo, &destinationVideoInfo); + EXPECT_EQ(resultSupported, false); +} + +HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_testFun, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + int createType = 0x1; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + ret = OH_VideoProcessing_Create(&instance2, createType); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError(callback, OnError); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, OnState); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = 
OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + ret = OH_VideoProcessing_GetSurface(instance, &window); + ret = OH_VideoProcessing_GetSurface(instance2, &window2); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + ret = OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_AVFormat* parameter = OH_AVFormat_Create(); + ret = OH_VideoProcessing_SetParameter(instance, parameter); + EXPECT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + ret = OH_VideoProcessing_GetParameter(instance, parameter); + EXPECT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + ret = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + for (int i = 0; i < 2; i++) { + ret = OH_VideoProcessing_Start(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Stop(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } + ret = OH_VideoProcessing_RenderOutputBuffer(instance, 0); + EXPECT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + ret = OH_VideoProcessingCallback_Destroy(callback); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(instance); + ret = OH_VideoProcessing_Destroy(instance2); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_sdr2sdr, TestSize.Level1) +{ + const int formatListNum = 3; + int formatListSDR[formatListNum] = {GRAPHIC_PIXEL_FMT_YCBCR_420_SP, GRAPHIC_PIXEL_FMT_YCRCB_420_SP, + GRAPHIC_PIXEL_FMT_RGBA_8888}; + int formatListSDRNative[formatListNum] = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888}; + const int colorMetaSdr2SdrListNum = 2; + const int colorMetaParaNum = 4; + const int colorMetaParaInMetaNum = 0; + const int colorMetaParaInColorNum = 1; + const int colorMetaParaOutColorNum = 3; + int colorMetaListSdr2Sdr[colorMetaSdr2SdrListNum][colorMetaParaNum] = { + {CM_METADATA_NONE, CM_BT601_EBU_LIMIT, CM_METADATA_NONE, OH_COLORSPACE_BT709_LIMIT}, + {CM_METADATA_NONE, CM_BT601_SMPTE_C_LIMIT, CM_METADATA_NONE, OH_COLORSPACE_BT709_LIMIT} + }; + for (int n = 0; n < colorMetaSdr2SdrListNum; n++) { + inMetaType_ = static_cast(colorMetaListSdr2Sdr[n][colorMetaParaInMetaNum]); + inColspc_ = static_cast(colorMetaListSdr2Sdr[n][colorMetaParaInColorNum]); + outColspcInfo_ = static_cast(colorMetaListSdr2Sdr[n][colorMetaParaOutColorNum]); + for (int i = 0; i < formatListNum; i++) { + inSurfacePixelFmt_ = static_cast(formatListSDR[i]); + requestCfg_.format = inSurfacePixelFmt_; + for (int j = 0; j < formatListNum; j++) { + outSurfacePixelFmt_ = static_cast(formatListSDRNative[j]); + VideoProcessing_ErrorCode ret = ProcessToSDR(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } + } + } +} + +HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_hdr2sdr, TestSize.Level1) +{ + const int formatListNum = 3; + int formatListHDR[formatListNum] = {GRAPHIC_PIXEL_FMT_YCBCR_P010, GRAPHIC_PIXEL_FMT_YCRCB_P010, + GRAPHIC_PIXEL_FMT_RGBA_1010102}; + int formatListSDRNative[formatListNum] = {NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP, NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP, + NATIVEBUFFER_PIXEL_FMT_RGBA_8888}; + const 
int colorMetaHdr2SdrListNum = 2; + const int colorMetaParaNum = 4; + const int colorMetaParaInMetaNum = 0; + const int colorMetaParaInColorNum = 1; + const int colorMetaParaOutColorNum = 3; + int colorMetaListHdr2Sdr[colorMetaHdr2SdrListNum][colorMetaParaNum] = { + {CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, CM_METADATA_NONE, OH_COLORSPACE_BT709_LIMIT}, + {CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, CM_METADATA_NONE, OH_COLORSPACE_BT709_LIMIT} + }; + for (int n = 0; n < colorMetaHdr2SdrListNum; n++) { + inMetaType_ = static_cast(colorMetaListHdr2Sdr[n][colorMetaParaInMetaNum]); + inColspc_ = static_cast(colorMetaListHdr2Sdr[n][colorMetaParaInColorNum]); + outColspcInfo_ = static_cast(colorMetaListHdr2Sdr[n][colorMetaParaOutColorNum]); + for (int i = 0; i < formatListNum; i++) { + inSurfacePixelFmt_ = static_cast(formatListHDR[i]); + requestCfg_.format = inSurfacePixelFmt_; + for (int j = 0; j < formatListNum; j++) { + outSurfacePixelFmt_ = static_cast(formatListSDRNative[j]); + VideoProcessing_ErrorCode ret = ProcessToSDR(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } + } + } +} + +HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_hdr2hdr, TestSize.Level1) +{ + const int formatListNum = 3; + int formatListHDR[formatListNum] = {GRAPHIC_PIXEL_FMT_YCBCR_P010, GRAPHIC_PIXEL_FMT_YCRCB_P010, + GRAPHIC_PIXEL_FMT_RGBA_1010102}; + int formatListHDRNative[formatListNum] = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, NATIVEBUFFER_PIXEL_FMT_YCRCB_P010, + NATIVEBUFFER_PIXEL_FMT_RGBA_1010102}; + const int colorMetaHdr2HdrListNum = 4; + const int colorMetaParaNum = 4; + const int colorMetaParaInMetaNum = 0; + const int colorMetaParaInColorNum = 1; + const int colorMetaParaOutMetaNum = 2; + const int colorMetaParaOutColorNum = 3; + int colorMetaListSdr2Sdr[colorMetaHdr2HdrListNum][colorMetaParaNum] = { + {CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_HLG_LIMIT}, + {CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT, OH_VIDEO_HDR_HLG, OH_COLORSPACE_BT2020_HLG_LIMIT}, + {CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT, OH_VIDEO_HDR_HLG, OH_COLORSPACE_BT2020_HLG_LIMIT}, + {CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT, OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_PQ_LIMIT} + }; + for (int n = 0; n < colorMetaHdr2HdrListNum; n++) { + inMetaType_ = static_cast(colorMetaListSdr2Sdr[n][colorMetaParaInMetaNum]); + inColspc_ = static_cast(colorMetaListSdr2Sdr[n][colorMetaParaInColorNum]); + outMetaType_ = static_cast(colorMetaListSdr2Sdr[n][colorMetaParaOutMetaNum]); + outColspcInfo_ = static_cast(colorMetaListSdr2Sdr[n][colorMetaParaOutColorNum]); + for (int i = 0; i < formatListNum; i++) { + inSurfacePixelFmt_ = static_cast(formatListHDR[i]); + requestCfg_.format = inSurfacePixelFmt_; + for (int j = 0; j < formatListNum; j++) { + outSurfacePixelFmt_ = static_cast(formatListHDRNative[j]); + VideoProcessing_ErrorCode ret = ProcessToHDR(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } + } + } +} + +HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, codeCoverageComponent_10, TestSize.Level1) +{ + inMetaType_ = static_cast(CM_IMAGE_HDR_ISO_SINGLE); + inColspc_ = static_cast(CM_DISPLAY_P3_HLG); + outColspcInfo_ = static_cast(OH_COLORSPACE_DISPLAY_P3_HLG); + inSurfacePixelFmt_ = static_cast(GRAPHIC_PIXEL_FMT_YUYV_422_PKG); + requestCfg_.format = inSurfacePixelFmt_; + outSurfacePixelFmt_ = static_cast(NATIVEBUFFER_PIXEL_FMT_YUYV_422_PKG); + VideoProcessing_ErrorCode ret = ProcessToSDR(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} +HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, codeCoverageComponent_20, 
TestSize.Level1) +{ + inMetaType_ = static_cast(CM_VIDEO_HDR_VIVID); + inColspc_ = static_cast(CM_BT2020_PQ_LIMIT); + outColspcInfo_ = static_cast(OH_COLORSPACE_BT709_LIMIT); + inSurfacePixelFmt_ = static_cast(GRAPHIC_PIXEL_FMT_YCBCR_P010); + requestCfg_.format = inSurfacePixelFmt_; + outSurfacePixelFmt_ = static_cast(NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP); + VideoProcessing_ErrorCode ret = VIDEO_PROCESSING_SUCCESS; + sptr buffer; + CM_ColorSpaceInfo inColspcInfo = { + static_cast((inColspc_ & COLORPRIMARIES_MASK) >> COLORPRIMARIES_OFFSET), + static_cast((inColspc_ & TRANSFUNC_MASK) >> TRANSFUNC_OFFSET), + static_cast((inColspc_ & MATRIX_MASK) >> MATRIX_OFFSET), + static_cast((inColspc_ & RANGE_MASK) >> RANGE_OFFSET) + }; + ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x1; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + ret = OH_VideoProcessing_Create(&instance2, createType); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + ret = OH_VideoProcessingCallback_BindOnError(callback, OnError); + ret = OH_VideoProcessingCallback_BindOnState(callback, OnState); + ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + ret = OH_VideoProcessing_GetSurface(instance, &window); + ret = OH_VideoProcessing_GetSurface(instance2, &window2); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + ret = OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + ret = OH_VideoProcessing_SetSurface(instance, window2); + ret = OH_VideoProcessing_Start(instance); + window->surface->RequestBuffer(buffer, fence_, requestCfg_); + SetMeatadata(buffer, inColspcInfo); + SetMeatadata(buffer, (uint32_t)inMetaType_); + window->surface->FlushBuffer(buffer, fence_, flushCfg_); + OH_VideoProcessing_SetSurface(instance, window2); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_Destroy(instance2); + OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +} \ No newline at end of file diff --git a/test/unittest/colorspace_converter_video/BUILD.gn b/test/unittest/colorspace_converter_video/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..71e7b31396f44fb20579a29cb61354f0fc08063c --- /dev/null +++ b/test/unittest/colorspace_converter_video/BUILD.gn @@ -0,0 +1,58 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("colorspace_converter_video_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$INTERFACES_INNER_API_DIR", + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$ALGORITHM_DIR/colorspace_converter/include", + "$ALGORITHM_DIR/colorspace_converter_video/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "$TEST_UTILS_PATH/ColorSpaceConverter/sample", + ] + + sources = [ "colorspace_converter_video_unit_test.cpp" ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "media_foundation:media_foundation", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/unittest/colorspace_converter_video/colorspace_converter_video_unit_test.cpp b/test/unittest/colorspace_converter_video/colorspace_converter_video_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..50ab0386b3531672911f8a8279373677906ee6e7 --- /dev/null +++ b/test/unittest/colorspace_converter_video/colorspace_converter_video_unit_test.cpp @@ -0,0 +1,362 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "algorithm_common.h" +#include "algorithm_errors.h" +#include "colorspace_converter_video_impl.h" +#include "colorspace_converter_video.h" +#include "colorspace_converter_video_description.h" + +using namespace std; +using namespace testing::ext; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +struct CallbackT : public ColorSpaceConverterVideoCallback { + void OnError(int32_t errorCode) override + { + (void)errorCode; + } + + void OnState(int32_t state) override + { + (void)state; + } + + void OnOutputBufferAvailable(uint32_t index, CscvBufferFlag flag) override + { + (void)index; + (void)flag; + } +}; + +class ColorSpaceConverterVideoUnitTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp(void) {}; + void TearDown(void) {}; +}; + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_init_01, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + EXPECT_NE(nullptr, handle); + ColorSpaceConvertVideoDestroy(handle); + ColorSpaceConvertVideoDestroy(nullptr); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setcallback_01, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + ColorSpaceConverterVideoCallbackImpl::Callback cb; + auto ret = ColorSpaceConvertVideoSetCallback(handle, &cb, nullptr); + ColorSpaceConvertVideoDestroy(handle); + handle = nullptr; + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setcallback_02, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + auto ret = ColorSpaceConvertVideoSetCallback(handle, nullptr, nullptr); + ColorSpaceConvertVideoDestroy(handle); + handle = nullptr; + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setcallback_03, TestSize.Level1) +{ + ColorSpaceConverterVideoCallbackImpl::Callback cb; + auto ret = ColorSpaceConvertVideoSetCallback(nullptr, &cb, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setcallback_04, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoSetCallback(nullptr, nullptr, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setcallback_05, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + ColorSpaceConverterVideoCallbackImpl::Callback cb; + ColorSpaceConverterVideoCallbackImpl::Callback* cbp = &cb; + if (handle && cbp) { + auto ret = ColorSpaceConvertVideoSetCallback(handle, cbp, nullptr); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + } +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setoutputsurface_02, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + auto ret = ColorSpaceConvertVideoSetOutputSurface(handle, nullptr); + ColorSpaceConvertVideoDestroy(handle); + handle = nullptr; + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setoutputsurface_03, TestSize.Level1) +{ + sptr surface = nullptr; + sptr userData = nullptr; + auto sfp = reinterpret_cast(&surface); + auto ret = ColorSpaceConvertVideoSetOutputSurface(nullptr, sfp); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setoutputsurface_04, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoSetOutputSurface(nullptr, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, 
cscv_setoutputsurface_05, TestSize.Level1) +{ + sptr surface = nullptr; + auto handle = ColorSpaceConvertVideoCreate(); + auto sfp = reinterpret_cast(&surface); + auto ret = ColorSpaceConvertVideoSetOutputSurface(handle, sfp); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_createinputsurface_01, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + sptr surface = nullptr; + auto sfp = reinterpret_cast(&surface); + auto ret = ColorSpaceConvertVideoCreateInputSurface(handle, sfp); + ColorSpaceConvertVideoDestroy(handle); + handle = nullptr; + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_createinputsurface_02, TestSize.Level1) +{ + sptr surface = nullptr; + auto sfp = reinterpret_cast(&surface); + auto ret = ColorSpaceConvertVideoCreateInputSurface(nullptr, sfp); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_configure_01, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + Format parameter; + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, CM_METADATA_NONE); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, RANGE_LIMITED); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, MATRIX_BT709); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, TRANSFUNC_BT709); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, COLORPRIMARIES_BT709); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, + static_cast(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC)); + parameter.PutDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, 0.6); + auto parameterP = reinterpret_cast(¶meter); + auto ret = ColorSpaceConvertVideoConfigure(handle, parameterP); + ColorSpaceConvertVideoDestroy(handle); + handle = nullptr; + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_configure_02, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoConfigure(nullptr, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_prepare_02, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoPrepare(nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setparameter_02, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + + sptr inSurface = nullptr; + auto inSfp = reinterpret_cast(&inSurface); + auto ret = ColorSpaceConvertVideoCreateInputSurface(handle, inSfp); + + auto outSurfaceP = reinterpret_cast*>(inSfp); + auto outSurface = *outSurfaceP; + auto producer = outSurface->GetProducer(); + auto producerSurface = Surface::CreateSurfaceAsProducer(producer); + auto outSfp = reinterpret_cast(&producerSurface); + ret = ColorSpaceConvertVideoSetOutputSurface(handle, outSfp); + + Format parameter; + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_PIXEL_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_HDR_METADATA_TYPE, CM_METADATA_NONE); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_RANGE, RANGE_LIMITED); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_MATRIX, MATRIX_BT709); + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_TRANS_FUNC, TRANSFUNC_BT709); + 
parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_COLORSPACE_PRIMARIES, COLORPRIMARIES_BT709); + auto parameterP = reinterpret_cast(¶meter); + ret = ColorSpaceConvertVideoConfigure(handle, parameterP); + ret = ColorSpaceConvertVideoPrepare(handle); + + ret = ColorSpaceConvertVideoSetParameter(handle, nullptr); + ColorSpaceConvertVideoDestroy(handle); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setparameter_03, TestSize.Level1) +{ + Format parameter; + parameter.PutIntValue(CscVDescriptionKey::CSCV_KEY_RENDER_INTENT, + static_cast(RenderIntent::RENDER_INTENT_ABSOLUTE_COLORIMETRIC)); + parameter.PutDoubleValue(CscVDescriptionKey::CSCV_KEY_SDRUI_BRIGHTNESS_RATIO, 0.6); + auto parameterP = reinterpret_cast(¶meter); + auto ret = ColorSpaceConvertVideoSetParameter(nullptr, parameterP); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_setparameter_04, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoSetParameter(nullptr, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_getparameter_02, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoGetParameter(nullptr, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_getparameter_03, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + auto ret = ColorSpaceConvertVideoGetParameter(handle, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_reset_01, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + if (handle) { + auto ret = ColorSpaceConvertVideoReset(handle); + ColorSpaceConvertVideoDestroy(handle); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + } +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_reset_02, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoReset(nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_release_01, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + if (handle) { + auto ret = ColorSpaceConvertVideoRelease(handle); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + } + ColorSpaceConvertVideoDestroy(handle); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_release_02, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoRelease(nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_notifyEos_01, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + auto ret = ColorSpaceConvertVideoNotifyEos(handle); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); + ColorSpaceConvertVideoDestroy(handle); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_notifyEos_02, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoNotifyEos(nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_releaseOutputBuffer_01, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoReleaseOutputBuffer(nullptr, 0, 0); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_start_01, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + auto ret = ColorSpaceConvertVideoStart(handle); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_start_02, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoStart(nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_stop_01, TestSize.Level1) +{ + auto handle = 
ColorSpaceConvertVideoCreate(); + auto ret = ColorSpaceConvertVideoStop(handle); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_stop_02, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoStop(nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_releaseOutputBuffer_02, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + auto ret = ColorSpaceConvertVideoReleaseOutputBuffer(handle, 0, 0); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_flush_01, TestSize.Level1) +{ + auto handle = ColorSpaceConvertVideoCreate(); + if (handle) { + auto ret = ColorSpaceConvertVideoFlush(handle); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + } +} + +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_flush_02, TestSize.Level1) +{ + auto ret = ColorSpaceConvertVideoFlush(nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/test/unittest/detail_enhancer/BUILD.gn b/test/unittest/detail_enhancer/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..04fe84b06434bd5c17b997e3b134172f5d7ab53b --- /dev/null +++ b/test/unittest/detail_enhancer/BUILD.gn @@ -0,0 +1,58 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("detail_enhancer_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/DetailEnhancer/sample", + "$ALGORITHM_DIR/detail_enhancer/include", + "$ALGORITHM_DIR/extensions/detail_enhancer/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + ] + + sources = [ "detail_enhancer_unit_test.cpp" ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$TEST_UTILS_PATH/DetailEnhancer/sample:detailEnh_test_utils", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} diff --git a/test/unittest/detail_enhancer/detail_enhancer_unit_test.cpp b/test/unittest/detail_enhancer/detail_enhancer_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..401ba53dddde92f9bb331740d9659ed3fcbc9c23 --- /dev/null +++ b/test/unittest/detail_enhancer/detail_enhancer_unit_test.cpp @@ -0,0 +1,2299 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "algorithm_common.h" +#include "algorithm_errors.h" +#include "graphic_common_c.h" +#include "detailEnh_sample.h" +#include "detailEnh_sample_define.h" +#include "detail_enhancer_image.h" + +using namespace std; +using namespace testing::ext; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +int32_t GetFileSize(int32_t width, int32_t height, int32_t format) +{ + int32_t size = width * height; + switch (format) { + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP: + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P: + size = size * 3 / 2; // 3; 2 + break; + case OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888: + size *= 4; // 4 + break; + default: + size *= 3; // 3 + break; + } + return size; +} + +class DetailEnhancerUnitTest : public testing::Test { +public: + static void SetUpTestCase(void); + static void TearDownTestCase(void); + void SetUp(); + void TearDown(); +}; + +void DetailEnhancerUnitTest::SetUpTestCase(void) +{ + cout << "[SetUpTestCase]: " << endl; +} + +void DetailEnhancerUnitTest::TearDownTestCase(void) +{ + cout << "[TearDownTestCase]: " << endl; +} + +void DetailEnhancerUnitTest::SetUp(void) +{ + cout << "[SetUp]: SetUp!!!" << endl; +} + +void DetailEnhancerUnitTest::TearDown(void) +{ + cout << "[TearDown]: over!!!" 
<< endl; +} + +// detail enhancer init +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_01, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + EXPECT_NE(detailEnh, nullptr); +} + +// detail enhancer init meultiple times +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_02, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + detailEnh = DetailEnhancerImage::Create(); + detailEnh = DetailEnhancerImage::Create(); + detailEnh = DetailEnhancerImage::Create(); + detailEnh = DetailEnhancerImage::Create(); + EXPECT_NE(detailEnh, nullptr); +} + +// set parameter to high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_04, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + auto res = detailEnh->SetParameter(param); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +// set parameter to medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_05, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + auto res = detailEnh->SetParameter(param); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +// set parameter to low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_06, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + auto res = detailEnh->SetParameter(param); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +// set parameter to none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_07, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + auto res = detailEnh->SetParameter(param); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +// get parameters after set to high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_09, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .level = DETAIL_ENH_LEVEL_HIGH, + .uri = "", + }; + auto res_set = detailEnh->SetParameter(param); + auto res_get = detailEnh->GetParameter(param); + EXPECT_EQ(res_set, VPE_ALGO_ERR_OK); + EXPECT_EQ(res_get, VPE_ALGO_ERR_OK); +} + +// get parameters after set to medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_10, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .level = DETAIL_ENH_LEVEL_MEDIUM, + .uri = "", + }; + auto res_set = detailEnh->SetParameter(param); + auto res_get = detailEnh->GetParameter(param); + EXPECT_EQ(res_set, VPE_ALGO_ERR_OK); + EXPECT_EQ(res_get, VPE_ALGO_ERR_OK); +} + +// get parameters after set to low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_11, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .level = DETAIL_ENH_LEVEL_LOW, + .uri = "", + }; + auto res_set = detailEnh->SetParameter(param); + auto res_get = detailEnh->GetParameter(param); + EXPECT_EQ(res_set, VPE_ALGO_ERR_OK); + EXPECT_EQ(res_get, VPE_ALGO_ERR_OK); +} + +// get parameters after set to none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_12, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .level = DETAIL_ENH_LEVEL_NONE, + .uri = "", + }; + auto res_set = detailEnh->SetParameter(param); + auto res_get = detailEnh->GetParameter(param); + 
EXPECT_EQ(res_set, VPE_ALGO_ERR_OK); + EXPECT_EQ(res_get, VPE_ALGO_ERR_OK); +} + +// get parameters +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_13, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .level = DETAIL_ENH_LEVEL_HIGH, + .uri = "", + }; + auto res = detailEnh->GetParameter(param); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +// test getBuffer with bad size +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_14, TestSize.Level1) +{ + int32_t width = -1; + int32_t height = -1; + int32_t format = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto createdBuffer = CreateSurfaceBuffer(format, width, height); + EXPECT_EQ(createdBuffer, nullptr); +} + +// set parameter uri to abnormal value +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_init_15, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "x", + .level = DETAIL_ENH_LEVEL_NONE, + }; + auto res = detailEnh->SetParameter(param); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB, aisr +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_01, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 4096; + int32_t outputHeight = 3072; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_02, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_03, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_04, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t 
outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_05, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 0.5x, aisr MOVED TO VPE_EXT +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_06, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 0.5x, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_07, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1024; + int32_t outputHeight = 768; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 0.5x, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_08, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1024; + int32_t outputHeight = 768; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 0.5x, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_09, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1024; + int32_t outputHeight = 768; + int32_t inputFormat = 
OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 0.5x, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_10, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1024; + int32_t outputHeight = 768; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 2x, aisr +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_11, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 8192; + int32_t outputHeight = 6144; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 2x, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_12, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 4096; + int32_t outputHeight = 3072; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 2x, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_13, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 4096; + int32_t outputHeight = 3072; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 2x, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_14, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 4096; + int32_t outputHeight = 3072; + 
int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB different resolution 2x, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_15, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 4096; + int32_t outputHeight = 3072; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + + +// process RGB to RGB unusual resolution, aisr exceeds x32 scale +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_16, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 3145728; + int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB unusual resolution, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_17, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 3145728; + int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB unusual resolution, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_18, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 3145728; + int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB unusual resolution, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_19, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 3145728; 
+ int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB unusual resolution, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_20, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 3145728; + int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to I420; different format, aisr +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_21, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to I420; different format, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_22, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to I420; different format, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_23, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to I420; different format, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_24, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t 
outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to I420; different format, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_25, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process with unsupported input format +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_26, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_422_SP; // unsupported format + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process with unsupported output format +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_27, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_422_SP; // unsupported format + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process RGB to RGB without setting parameters first +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_28, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely small image input, aisr +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_29, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1; + int32_t outputHeight = 1; + int32_t 
inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely small image input, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_30, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1; + int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely small image input, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_31, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1; + int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely small image input, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_32, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1; + int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely small image input, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_33, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1; + int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely big image input, aisr +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_34, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t 
outputWidth = 30720; + int32_t outputHeight = 17280; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely big image input, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_35, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 30720; + int32_t outputHeight = 17280; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely big image input, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_36, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 30720; + int32_t outputHeight = 17280; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely big image input, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_37, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 30720; + int32_t outputHeight = 17280; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 extremely big image input, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_38, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 30720; + int32_t outputHeight = 17280; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 odd number input, aisr MOVED TO VPE_EXT +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_39, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = 
"", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1025; + int32_t outputHeight = 767; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 odd number input, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_40, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1025; + int32_t outputHeight = 767; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 odd number input, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_41, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1025; + int32_t outputHeight = 767; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 odd number input, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_42, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1025; + int32_t outputHeight = 767; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 odd number input, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_43, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1025; + int32_t outputHeight = 767; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 input output different width/height ratio, aisr MOVED TO VPE_EXT +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_44, TestSize.Level1) +{ + auto detailEnh = 
DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1024; + int32_t outputHeight = 480; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 input output different width/height ratio, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_45, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1024; + int32_t outputHeight = 480; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 input output different width/height ratio, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_46, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1024; + int32_t outputHeight = 480; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 input output different width/height ratio, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_47, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1024; + int32_t outputHeight = 480; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process NV12 to NV12 input output different width/height ratio, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_48, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 1024; + int32_t outputHeight = 480; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process but buffer is 
nullptr +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_49, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t ret = detailEnh->Process(nullptr, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// repeating process NV12 to NV12 with set parameters +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_50, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + // repeat again with AISR + DetailEnhancerParameters param2 { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param2); + int32_t ret2 = detailEnh->Process(input, output); + // repeat again with medium + DetailEnhancerParameters param3 { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param3); + int32_t ret3 = detailEnh->Process(input, output); + // repeat again with low + DetailEnhancerParameters param4 { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param4); + int32_t ret4 = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + EXPECT_EQ(ret2, VPE_ALGO_ERR_OK); + EXPECT_NE(ret3, VPE_ALGO_ERR_OK); + EXPECT_NE(ret4, VPE_ALGO_ERR_OK); +} + +// process twice +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_51, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extern C interface +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_52, TestSize.Level1) +{ + auto input = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 4096, 3072); + auto output = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 2048, 1536); + DetailEnhancerLevel level = DETAIL_ENH_LEVEL_HIGH; + void* lib = dlopen("/system/lib64/libvideoprocessingengine.z.so", RTLD_LAZY); + if (lib == nullptr) { + printf("cannot load vpe lib\n"); + return; + } + + typedef int32_t (*DetailEnhancerCreate)(int32_t*); + typedef int32_t (*DetailEnhancerProcessImage)(int32_t, + OHNativeWindowBuffer*, OHNativeWindowBuffer*, int32_t); + typedef int32_t (*DetailEnhancerDestroy)(int32_t*); + + auto detailEnhCreate = reinterpret_cast<DetailEnhancerCreate>(dlsym(lib, "DetailEnhancerCreate")); + auto detailEnhProcessImage = + reinterpret_cast<DetailEnhancerProcessImage>(dlsym(lib, "DetailEnhancerProcessImage")); + auto detailEnhDestroy = reinterpret_cast<DetailEnhancerDestroy>(dlsym(lib, 
"DetailEnhancerDestroy")); + + int32_t instanceSrId = -1; + int32_t res = detailEnhCreate(&instanceSrId); + if (res != 0 || instanceSrId == -1) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + if (input == nullptr || output == nullptr) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + OHNativeWindowBuffer* srIn = OH_NativeWindow_CreateNativeWindowBufferFromSurfaceBuffer(&input); + OHNativeWindowBuffer* srOut = OH_NativeWindow_CreateNativeWindowBufferFromSurfaceBuffer(&output); + res = detailEnhProcessImage(instanceSrId, srIn, srOut, static_cast<int32_t>(level)); + if (res != 0) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + res = detailEnhDestroy(&instanceSrId); + if (res != 0) { + dlclose(lib); + return; + } + EXPECT_EQ(res, VPE_ALGO_ERR_OK); + res = detailEnhDestroy(&instanceSrId); // destroy twice + dlclose(lib); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +// check extern C interface, create with null instance +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_53, TestSize.Level1) +{ + auto input = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 4096, 3072); + auto output = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 2048, 1536); + void* lib = dlopen("/system/lib64/libvideoprocessingengine.z.so", RTLD_LAZY); + if (lib == nullptr) { + printf("cannot load vpe lib\n"); + return; + } + typedef int32_t (*DetailEnhancerCreate)(int32_t*); + auto detailEnhCreate = reinterpret_cast<DetailEnhancerCreate>(dlsym(lib, "DetailEnhancerCreate")); + int32_t ret = detailEnhCreate(nullptr); + dlclose(lib); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extern C interface, process with null output buffer +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_54, TestSize.Level1) +{ + auto input = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 4096, 3072); + auto output = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 2048, 1536); + DetailEnhancerLevel level = DETAIL_ENH_LEVEL_HIGH; + void* lib = dlopen("/system/lib64/libvideoprocessingengine.z.so", RTLD_LAZY); + if (lib == nullptr) { + printf("cannot load vpe lib\n"); + return; + } + + typedef int32_t (*DetailEnhancerCreate)(int32_t*); + typedef int32_t (*DetailEnhancerProcessImage)(int32_t, + OHNativeWindowBuffer*, OHNativeWindowBuffer*, int32_t); + typedef int32_t (*DetailEnhancerDestroy)(int32_t*); + + auto detailEnhCreate = reinterpret_cast<DetailEnhancerCreate>(dlsym(lib, "DetailEnhancerCreate")); + auto detailEnhProcessImage = + reinterpret_cast<DetailEnhancerProcessImage>(dlsym(lib, "DetailEnhancerProcessImage")); + auto detailEnhDestroy = reinterpret_cast<DetailEnhancerDestroy>(dlsym(lib, "DetailEnhancerDestroy")); + + int32_t instanceSrId = -1; + int32_t res = detailEnhCreate(&instanceSrId); + if (res != 0 || instanceSrId == -1) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + if (input == nullptr || output == nullptr) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + OHNativeWindowBuffer* srIn = OH_NativeWindow_CreateNativeWindowBufferFromSurfaceBuffer(&input); + res = detailEnhProcessImage(instanceSrId, srIn, nullptr, static_cast<int32_t>(level)); + if (res != 0) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + res = detailEnhDestroy(&instanceSrId); + if (res != 0) { + dlclose(lib); + return; + } + dlclose(lib); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +// check extern C interface, destroy nullptr +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_55, TestSize.Level1) +{ + auto input = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 4096, 3072); + auto output = 
CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 2048, 1536); + DetailEnhancerLevel level = DETAIL_ENH_LEVEL_HIGH; + void* lib = dlopen("/system/lib64/libvideoprocessingengine.z.so", RTLD_LAZY); + if (lib == nullptr) { + printf("cannot load vpe lib\n"); + return; + } + + typedef int32_t (*DetailEnhancerCreate)(int32_t*); + typedef int32_t (*DetailEnhancerProcessImage)(int32_t, + OHNativeWindowBuffer*, OHNativeWindowBuffer*, int32_t); + typedef int32_t (*DetailEnhancerDestroy)(int32_t*); + + auto detailEnhCreate = reinterpret_cast<DetailEnhancerCreate>(dlsym(lib, "DetailEnhancerCreate")); + auto detailEnhProcessImage = + reinterpret_cast<DetailEnhancerProcessImage>(dlsym(lib, "DetailEnhancerProcessImage")); + auto detailEnhDestroy = reinterpret_cast<DetailEnhancerDestroy>(dlsym(lib, "DetailEnhancerDestroy")); + + int32_t instanceSrId = -1; + int32_t res = detailEnhCreate(&instanceSrId); + if (res != 0 || instanceSrId == -1) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + if (input == nullptr || output == nullptr) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + OHNativeWindowBuffer* srIn = OH_NativeWindow_CreateNativeWindowBufferFromSurfaceBuffer(&input); + OHNativeWindowBuffer* srOut = OH_NativeWindow_CreateNativeWindowBufferFromSurfaceBuffer(&output); + res = detailEnhProcessImage(instanceSrId, srIn, srOut, static_cast<int32_t>(level)); + if (res != 0) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + res = detailEnhDestroy(nullptr); + if (res != 0) { + dlclose(lib); + return; + } + dlclose(lib); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +// process BGR to BGR, aisr +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_56, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 4096; + int32_t outputHeight = 3072; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 1024, 768); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process BGR to BGR, high +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_57, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// process BGR to BGR, medium +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_58, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = 
detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process BGR to BGR, low +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_59, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// process BGR to BGR, none +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_60, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, 4096, 3072); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_SP, aisr MOVED TO VPE_EXT +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_01, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 1024; + int32_t inputHeight = 768; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_SP, high +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_02, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_SP, medium +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_03, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 
1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_SP, low +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_04, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_SP, none +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_05, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCBCR_420_P, aisr MOVED TO VPE_EXT +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_06, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 1024; + int32_t inputHeight = 768; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCBCR_420_P, high +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_07, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, 
outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCBCR_420_P, medium +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_08, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCBCR_420_P, low +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_09, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCBCR_420_P, none +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_10, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_P, aisr MOVED TO VPE_EXT +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_11, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 1024; + int32_t inputHeight = 768; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_P, high +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_12, TestSize.Level1) +{ + auto detailEnh 
= DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_P, medium +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_13, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_P, low +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_14, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process YCRCB_420_P, none +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_15, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process invalid resolution +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_16, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 0; + int32_t inputHeight = 0; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = 
OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process invalid resolution +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_17, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 1024; + int32_t inputHeight = 768; + int32_t outputWidth = 0; + int32_t outputHeight = 0; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process null output buffer +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_18, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// Exceeds minimum reduction ratio +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_19, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2; + int32_t outputHeight = 1; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, nullptr); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process BGRA_8888, aisr MOVED TO VPE_EXT +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_20, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 1024; + int32_t inputHeight = 768; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, 
VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process BGRA_8888, high +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_21, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process BGRA_8888, medium +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_22, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process BGRA_8888, low +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_23, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process BGRA_8888, none +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_24, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// check enforce extension extream vision engine, process RGBA1010102, HIGH +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_25, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, 
+ .forceEve = 1, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check enforce extension extream vision engine, process RGBA1010102, MEDIUM +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_26, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + .forceEve = 1, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check enforce extension extream vision engine, process RGBA1010102, LOW +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_27, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + .forceEve = 1, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check enforce extension extream vision engine, process RGBA1010102, NONE +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_28, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + .forceEve = 1, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process RGBA1010102, HIGH +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_29, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = 
OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// check extension extream vision engine, process RGBA1010102, HIGH, format is not consistent +HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_30, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// normal process of aisr, AISR MOVED TO VPE_EXT +HWTEST_F(DetailEnhancerUnitTest, aisr_process_9, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 1024; + int32_t inputHeight = 768; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + ret = detailEnh->Process(input, output); // try to process twice + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// normal process of aisr, high +HWTEST_F(DetailEnhancerUnitTest, aisr_process_10, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + ret = detailEnh->Process(input, output); // try to process twice + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +// normal process of aisr, medium +HWTEST_F(DetailEnhancerUnitTest, aisr_process_11, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = 
CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); + ret = detailEnh->Process(input, output); // try to process twice + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// normal process of aisr, low +HWTEST_F(DetailEnhancerUnitTest, aisr_process_12, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); + ret = detailEnh->Process(input, output); // try to process twice + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// normal process of aisr, none +HWTEST_F(DetailEnhancerUnitTest, aisr_process_13, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerImage::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 4096; + int32_t inputHeight = 3072; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); + ret = detailEnh->Process(input, output); // try to process twice + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// destroy twice +HWTEST_F(DetailEnhancerUnitTest, aisr_process_1, TestSize.Level1) +{ + auto input = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 4096, 3072); + auto output = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 2048, 1536); + DetailEnhancerLevel level = DETAIL_ENH_LEVEL_HIGH; + void* lib = dlopen("/system/lib64/libvideoprocessingengine.z.so", RTLD_LAZY); + if (lib == nullptr) { + printf("cannot load vpe lib\n"); + return; + } + + typedef int32_t (*DetailEnhancerCreate)(int32_t*); + typedef int32_t (*DetailEnhancerProcessImage)(int32_t, + OHNativeWindowBuffer*, OHNativeWindowBuffer*, int32_t); + typedef int32_t (*DetailEnhancerDestroy)(int32_t*); + + auto detailEnhCreate = reinterpret_cast<DetailEnhancerCreate>(dlsym(lib, "DetailEnhancerCreate")); + auto detailEnhProcessImage = + reinterpret_cast<DetailEnhancerProcessImage>(dlsym(lib, "DetailEnhancerProcessImage")); + auto detailEnhDestroy = reinterpret_cast<DetailEnhancerDestroy>(dlsym(lib, "DetailEnhancerDestroy")); + + int32_t instanceSrId = -1; + int32_t res = detailEnhCreate(&instanceSrId); + if (res != 0 || instanceSrId == -1) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + if (input == nullptr || output == nullptr) { + detailEnhDestroy(&instanceSrId); + dlclose(lib); + return; + } + OHNativeWindowBuffer* srIn = OH_NativeWindow_CreateNativeWindowBufferFromSurfaceBuffer(&input); + OHNativeWindowBuffer* srOut = OH_NativeWindow_CreateNativeWindowBufferFromSurfaceBuffer(&output); + res = detailEnhProcessImage(instanceSrId, srIn, srOut, static_cast<int32_t>(level)); + if (res != 0) { + detailEnhDestroy(&instanceSrId); +
dlclose(lib); + return; + } + res = detailEnhDestroy(&instanceSrId); + if (res != 0) { + EXPECT_EQ(res, VPE_ALGO_ERR_OK); + dlclose(lib); + return; + } + EXPECT_EQ(res, VPE_ALGO_ERR_OK); + res = detailEnhDestroy(&instanceSrId); // destroy twice + dlclose(lib); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/unittest/detail_enhancer_video/BUILD.gn b/test/unittest/detail_enhancer_video/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..aad23a646171a60efff1913f97312e7617037071 --- /dev/null +++ b/test/unittest/detail_enhancer_video/BUILD.gn @@ -0,0 +1,103 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("detail_enhancer_video_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/DetailEnhancer/sample", + "$ALGORITHM_DIR/detail_enhancer/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "$ALGORITHM_DIR/detail_enhancer_video/include", + "$TEST_UTILS_PATH/DetailEnhancer/sample/video", + ] + + sources = [ "detail_enhancer_video_unit_test.cpp" ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "media_foundation:media_foundation", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} + +ohos_unittest("detail_enhancer_video_innerapi_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/DetailEnhancer/sample", + "$ALGORITHM_DIR/detail_enhancer/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "$ALGORITHM_DIR/detail_enhancer_video/include", + "$TEST_UTILS_PATH/DetailEnhancer/sample/video", + ] + + sources = [ "detail_enhancer_video_innerapi_unit_test.cpp" ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + 
"media_foundation:media_foundation", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} diff --git a/test/unittest/detail_enhancer_video/detail_enhancer_video_innerapi_unit_test.cpp b/test/unittest/detail_enhancer_video/detail_enhancer_video_innerapi_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..286ee36447663c408e170bd4ab6f6de6f1885606 --- /dev/null +++ b/test/unittest/detail_enhancer_video/detail_enhancer_video_innerapi_unit_test.cpp @@ -0,0 +1,423 @@ +/* + * Copyright (C) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include + +#include + +#include + +#include "external_window.h" +#include "surface/window.h" + +#include "algorithm_errors.h" +#include "algorithm_utils.h" +#include "algorithm_video.h" + +using namespace testing::ext; + +namespace { +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; +constexpr uint32_t DEFAULT_WIDTH = 1920; +constexpr uint32_t DEFAULT_HEIGHT = 1080; + +int64_t GetSystemTime() +{ + struct timespec now; + clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = reinterpret_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + + return nanoTime / NANOS_IN_MICRO; +} +} // namespace + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class DetailEnhancerVideoCallbackImpl : public VpeVideoCallback { +public: + DetailEnhancerVideoCallbackImpl() = default; + ~DetailEnhancerVideoCallbackImpl() override = default; + DetailEnhancerVideoCallbackImpl(const DetailEnhancerVideoCallbackImpl&) = delete; + DetailEnhancerVideoCallbackImpl& operator=(const DetailEnhancerVideoCallbackImpl&) = delete; + DetailEnhancerVideoCallbackImpl(DetailEnhancerVideoCallbackImpl&&) = delete; + DetailEnhancerVideoCallbackImpl& operator=(DetailEnhancerVideoCallbackImpl&&) = delete; + + void OnError(VPEAlgoErrCode errorCode) final; + void OnState(VPEAlgoState state) final; + void OnEffectChange(uint32_t type) final; + void OnOutputFormatChanged(const Format& format) final; + void OnOutputBufferAvailable(uint32_t index, VpeBufferFlag flag) final; + void OnOutputBufferAvailable(uint32_t index, const VpeBufferInfo& info) final; +}; + +void DetailEnhancerVideoCallbackImpl::OnError(VPEAlgoErrCode errorCode) +{ + std::cout << "OnError:" << AlgorithmUtils::ToString(errorCode) << std::endl; +} + +void DetailEnhancerVideoCallbackImpl::OnState(VPEAlgoState state) +{ + std::cout << "OnState:" << AlgorithmUtils::ToString(state) << std::endl; +} + +void DetailEnhancerVideoCallbackImpl::OnEffectChange(uint32_t type) +{ + std::cout << "OnEffectChange:0x" << std::hex << type << std::endl; +} + +void DetailEnhancerVideoCallbackImpl::OnOutputFormatChanged(const Format& format) +{ + std::cout << "OnOutputFormatChanged:" << format.Stringify() << std::endl; +} + +void DetailEnhancerVideoCallbackImpl::OnOutputBufferAvailable(uint32_t index, VpeBufferFlag flag) +{ + std::cout << 
"OnOutputBufferAvailable: index=" << index << " flag=" << flag << std::endl; +} + +void DetailEnhancerVideoCallbackImpl::OnOutputBufferAvailable(uint32_t index, const VpeBufferInfo& info) +{ + std::cout << "OnOutputBufferAvailable: index=" << index << " flag=" << info.flag << + " pts=" << info.presentationTimestamp << std::endl; +} + +class DetailEnhancerVideoInnerAPIUnitTest : public testing::Test { +public: + static void SetUpTestCase(void); + static void TearDownTestCase(void); + + void SetUp(); + void TearDown(); + +protected: + uint32_t FlushSurf(OHNativeWindowBuffer* ohNativeWindowBuffer); + + sptr surface{}; + OHNativeWindow* nativeWindow{}; +}; + +void DetailEnhancerVideoInnerAPIUnitTest::SetUpTestCase(void) +{ + std::cout << "[SetUpTestCase]: " << std::endl; +} + +void DetailEnhancerVideoInnerAPIUnitTest::TearDownTestCase(void) +{ + std::cout << "[TearDownTestCase]: " << std::endl; +} + +void DetailEnhancerVideoInnerAPIUnitTest::SetUp(void) +{ + std::cout << "[SetUp]: SetUp!!!" << std::endl; +} + +void DetailEnhancerVideoInnerAPIUnitTest::TearDown(void) +{ + std::cout << "[TearDown]: over!!!" << std::endl; +} + +uint32_t DetailEnhancerVideoInnerAPIUnitTest::FlushSurf(OHNativeWindowBuffer* ohNativeWindowBuffer) +{ + struct Region region; + struct Region::Rect rect; + rect.x = 0; + rect.y = 0; + rect.w = DEFAULT_WIDTH; + rect.h = DEFAULT_HEIGHT; + region.rects = ▭ + NativeWindowHandleOpt(nativeWindow, SET_UI_TIMESTAMP, GetSystemTime()); + int32_t err = OH_NativeWindow_NativeWindowFlushBuffer(nativeWindow, ohNativeWindowBuffer, -1, region); + if (err != 0) { + std::cout << "FlushBuffer failed" << std::endl; + return 1; + } + return 0; +} + +// detail enhancer init +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_init_01, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + EXPECT_NE(detailEnh, nullptr); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_02, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + std::shared_ptr cb = nullptr; + auto ret = detailEnh->RegisterCallback(cb); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_03, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_04, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_MEDIUM), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_05, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_LOW), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_06, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + auto ret = detailEnh->GetInputSurface(); + EXPECT_NE(ret, nullptr); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_07, TestSize.Level1) +{ + auto detailEnh = 
VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + auto ret = detailEnh->GetInputSurface(); + ret = detailEnh->GetInputSurface(); + EXPECT_EQ(ret, nullptr); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_08, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + sptr surface = nullptr; + auto ret = detailEnh->SetOutputSurface(surface); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_09, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + auto ret = detailEnh->ReleaseOutputBuffer(0, true); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_10, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + int64_t renderTimestamp = 0; + auto ret = detailEnh->RenderOutputBufferAtTime(0, renderTimestamp); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// set parameter to midium +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_11, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_MEDIUM), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); +} + +// set parameter to low +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_12, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_LOW), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); +} + +// set parameter to none +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_13, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_NONE), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_14, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_15, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + std::shared_ptr cb = std::make_shared(); + auto res = detailEnh->RegisterCallback(cb); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_16, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + std::shared_ptr cb = std::make_shared(); + EXPECT_EQ(detailEnh->RegisterCallback(cb), VPE_ALGO_ERR_OK); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); + EXPECT_NE(detailEnh->GetInputSurface(), nullptr); + EXPECT_NE(detailEnh->Start(), VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_17, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + auto res = detailEnh->Stop(); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, 
detailenhancer_18, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + std::shared_ptr cb = std::make_shared(); + EXPECT_EQ(detailEnh->RegisterCallback(cb), VPE_ALGO_ERR_OK); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); + auto surface1 = detailEnh->GetInputSurface(); + EXPECT_EQ(detailEnh->SetOutputSurface(surface1), VPE_ALGO_ERR_OK); + EXPECT_EQ(detailEnh->Start(), VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_19, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + std::shared_ptr cb = std::make_shared(); + EXPECT_EQ(detailEnh->RegisterCallback(cb), VPE_ALGO_ERR_OK); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); + auto surface1 = detailEnh->GetInputSurface(); + EXPECT_EQ(detailEnh->SetOutputSurface(surface1), VPE_ALGO_ERR_OK); + EXPECT_EQ(detailEnh->Start(), VPE_ALGO_ERR_OK); + EXPECT_NE(detailEnh->ReleaseOutputBuffer(100, true), VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_20, TestSize.Level1) +{ + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + std::shared_ptr cb = std::make_shared(); + EXPECT_EQ(detailEnh->RegisterCallback(cb), VPE_ALGO_ERR_OK); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); + auto surface1 = detailEnh->GetInputSurface(); + EXPECT_EQ(detailEnh->SetOutputSurface(surface1), VPE_ALGO_ERR_OK); + EXPECT_EQ(detailEnh->Start(), VPE_ALGO_ERR_OK); + EXPECT_NE(detailEnh->RenderOutputBufferAtTime(100, 0), VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_21, TestSize.Level1) +{ + OHNativeWindowBuffer* ohNativeWindowBuffer; + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + std::shared_ptr cb = std::make_shared(); + EXPECT_EQ(detailEnh->RegisterCallback(cb), VPE_ALGO_ERR_OK); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); + auto surface = detailEnh->GetInputSurface(); + auto detailEnh2 = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + auto surface2 = detailEnh2->GetInputSurface(); + EXPECT_EQ(detailEnh->SetOutputSurface(surface2), VPE_ALGO_ERR_OK); + EXPECT_EQ(detailEnh->Start(), VPE_ALGO_ERR_OK); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + auto ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_22, TestSize.Level1) +{ + OHNativeWindowBuffer* ohNativeWindowBuffer; + auto detailEnh = 
VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + std::shared_ptr cb = std::make_shared(); + EXPECT_EQ(detailEnh->RegisterCallback(cb), VPE_ALGO_ERR_OK); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); + auto surface = detailEnh->GetInputSurface(); + auto detailEnh2 = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + auto surface2 = detailEnh2->GetInputSurface(); + surface2->SetRequestWidthAndHeight(10, 10); + EXPECT_EQ(detailEnh->SetOutputSurface(surface2), VPE_ALGO_ERR_OK); + EXPECT_EQ(detailEnh->Start(), VPE_ALGO_ERR_OK); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + auto ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +HWTEST_F(DetailEnhancerVideoInnerAPIUnitTest, detailenhancer_23, TestSize.Level1) +{ + OHNativeWindowBuffer* ohNativeWindowBuffer; + auto detailEnh = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + std::shared_ptr cb = std::make_shared(); + EXPECT_EQ(detailEnh->RegisterCallback(cb), VPE_ALGO_ERR_OK); + Format param{}; + EXPECT_EQ(param.PutIntValue(ParameterKey::DETAIL_ENHANCER_QUALITY_LEVEL, DETAIL_ENHANCER_LEVEL_HIGH), true); + EXPECT_EQ(detailEnh->SetParameter(param), VPE_ALGO_ERR_OK); + auto surface = detailEnh->GetInputSurface(); + auto detailEnh2 = VpeVideo::Create(VIDEO_TYPE_DETAIL_ENHANCER); + auto surface2 = detailEnh2->GetInputSurface(); + surface2->SetRequestWidthAndHeight(10, 0); + EXPECT_EQ(detailEnh->SetOutputSurface(surface2), VPE_ALGO_ERR_OK); + EXPECT_EQ(detailEnh->Start(), VPE_ALGO_ERR_OK); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + auto ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/unittest/detail_enhancer_video/detail_enhancer_video_unit_test.cpp b/test/unittest/detail_enhancer_video/detail_enhancer_video_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7deb3a0108d869694787717cadac1e0dcbd26307 --- /dev/null +++ b/test/unittest/detail_enhancer_video/detail_enhancer_video_unit_test.cpp @@ -0,0 +1,454 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "algorithm_common.h" +#include "algorithm_errors.h" + +#include "detail_enhancer_video_impl.h" +#include "detail_enhancer_video.h" +#include "surface/window.h" +#include "external_window.h" + +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; +constexpr uint32_t DEFAULT_WIDTH = 1920; +constexpr uint32_t DEFAULT_HEIGHT = 1080; + +using namespace std; +using namespace testing::ext; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +class DetailEnhancerVideoCallbackImpl : public DetailEnhancerVideoCallback { +public: + DetailEnhancerVideoCallbackImpl() = default; + ~DetailEnhancerVideoCallbackImpl() override = default; + DetailEnhancerVideoCallbackImpl(const DetailEnhancerVideoCallbackImpl&) = delete; + DetailEnhancerVideoCallbackImpl& operator=(const DetailEnhancerVideoCallbackImpl&) = delete; + DetailEnhancerVideoCallbackImpl(DetailEnhancerVideoCallbackImpl&&) = delete; + DetailEnhancerVideoCallbackImpl& operator=(DetailEnhancerVideoCallbackImpl&&) = delete; + + void OnError(VPEAlgoErrCode errorCode) override; + void OnState(VPEAlgoState state) override; + void OnOutputBufferAvailable(uint32_t index, DetailEnhBufferFlag flag) override; +}; +void DetailEnhancerVideoCallbackImpl::OnOutputBufferAvailable(uint32_t index, DetailEnhBufferFlag flag) +{ + switch (flag) { + case DETAIL_ENH_BUFFER_FLAG_NONE: + std::cout << "OnOutputBufferAvailable: normal" << std::endl; + break; + case DETAIL_ENH_BUFFER_FLAG_EOS: + std::cout << "OnOutputBufferAvailable: end of stream" << std::endl; + break; + default: + std::cout << "OnOutputBufferAvailable: unknown" << std::endl; + break; + } +} +void DetailEnhancerVideoCallbackImpl::OnError(VPEAlgoErrCode errorCode) +{ + (void)errorCode; +} +void DetailEnhancerVideoCallbackImpl::OnState(VPEAlgoState state) +{ + (void)state; +} + +class DetailEnhancerVideoUnitTest : public testing::Test { +public: + static void SetUpTestCase(void); + static void TearDownTestCase(void); + void SetUp(); + void TearDown(); + sptr surface; + OHNativeWindow *nativeWindow; + uint32_t FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer); +}; + +void DetailEnhancerVideoUnitTest::SetUpTestCase(void) +{ + cout << "[SetUpTestCase]: " << endl; +} + +void DetailEnhancerVideoUnitTest::TearDownTestCase(void) +{ + cout << "[TearDownTestCase]: " << endl; +} + +void DetailEnhancerVideoUnitTest::SetUp(void) +{ + cout << "[SetUp]: SetUp!!!" << endl; +} + +void DetailEnhancerVideoUnitTest::TearDown(void) +{ + cout << "[TearDown]: over!!!" 
<< endl; +} + +int64_t GetSystemTime() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = reinterpret_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + + return nanoTime / NANOS_IN_MICRO; +} + +uint32_t DetailEnhancerVideoUnitTest::FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer) +{ + struct Region region; + struct Region::Rect *rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = DEFAULT_WIDTH; + rect->h = DEFAULT_HEIGHT; + region.rects = rect; + NativeWindowHandleOpt(nativeWindow, SET_UI_TIMESTAMP, GetSystemTime()); + int32_t err = OH_NativeWindow_NativeWindowFlushBuffer(nativeWindow, ohNativeWindowBuffer, -1, region); + delete rect; + if (err != 0) { + cout << "FlushBuffer failed" << endl; + return 1; + } + return 0; +} + +// detail enhancer init +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_init_01, TestSize.Level1) +{ + auto detailEnhVideo = DetailEnhancerVideo::Create(); + EXPECT_NE(detailEnhVideo, nullptr); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_02, TestSize.Level1) +{ + auto detailEnhVideo = DetailEnhancerVideo::Create(); + std::shared_ptr cb = nullptr; + auto ret = detailEnhVideo->RegisterCallback(cb); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_03, TestSize.Level1) +{ + auto detailEnhVideo = DetailEnhancerVideo::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + auto ret = detailEnhVideo->SetParameter(param, VIDEO); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_04, TestSize.Level1) +{ + auto detailEnhVideo = DetailEnhancerVideo::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + auto ret = detailEnhVideo->SetParameter(param, VIDEO); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_05, TestSize.Level1) +{ + auto detailEnhVideo = DetailEnhancerVideo::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + auto ret = detailEnhVideo->SetParameter(param, VIDEO); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_06, TestSize.Level1) +{ + auto detailEnhVideo = DetailEnhancerVideo::Create(); + auto ret = detailEnhVideo->GetInputSurface(); + EXPECT_NE(ret, nullptr); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_07, TestSize.Level1) +{ + auto detailEnhVideo = DetailEnhancerVideo::Create(); + auto ret = detailEnhVideo->GetInputSurface(); + ret = detailEnhVideo->GetInputSurface(); + EXPECT_EQ(ret, nullptr); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_08, TestSize.Level1) +{ + auto detailEnhVideo = DetailEnhancerVideo::Create(); + sptr surface = nullptr; + auto ret = detailEnhVideo->SetOutputSurface(surface); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_09, TestSize.Level1) +{ + auto detailEnhVideo = DetailEnhancerVideo::Create(); + + auto ret = detailEnhVideo->RenderOutputBuffer(0); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_10, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + auto ret = detailEnh->NotifyEos(); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +// set parameter to midium +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_11, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + DetailEnhancerParameters param 
{ + .uri = "", + .level = DETAIL_ENH_LEVEL_MEDIUM, + }; + auto res = detailEnh->SetParameter(param, VIDEO); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +// set parameter to low +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_12, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_LOW, + }; + auto res = detailEnh->SetParameter(param, VIDEO); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +// set parameter to none +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_13, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_NONE, + }; + auto res = detailEnh->SetParameter(param, VIDEO); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_14, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + auto res = detailEnh->SetParameter(param, VIDEO); + res = detailEnh->SetParameter(param, VIDEO); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_15, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = detailEnh->RegisterCallback(cb); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_16, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = detailEnh->RegisterCallback(cb); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + res = detailEnh->SetParameter(param, VIDEO); + auto ret = detailEnh->GetInputSurface(); + res = detailEnh->Start(); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_17, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + auto res = detailEnh->Stop(); + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_18, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = detailEnh->RegisterCallback(cb); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + res = detailEnh->SetParameter(param, VIDEO); + auto surface1 = detailEnh->GetInputSurface(); + res = detailEnh->SetOutputSurface(surface1); + res = detailEnh->Start(); + res = detailEnh->NotifyEos(); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_19, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = detailEnh->RegisterCallback(cb); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + res = detailEnh->SetParameter(param, VIDEO); + auto surface1 = detailEnh->GetInputSurface(); + res = detailEnh->SetOutputSurface(surface1); + res = detailEnh->Start(); + res = detailEnh->ReleaseOutputBuffer(100, true); // 100 index + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_20, TestSize.Level1) +{ + auto detailEnh = DetailEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = detailEnh->RegisterCallback(cb); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + res = detailEnh->SetParameter(param, 
VIDEO); + auto surface1 = detailEnh->GetInputSurface(); + res = detailEnh->SetOutputSurface(surface1); + res = detailEnh->Start(); + res = detailEnh->ReleaseOutputBuffer(100, false); // 100 index + EXPECT_NE(res, VPE_ALGO_ERR_OK); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_21, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + auto detailEnh = DetailEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = detailEnh->RegisterCallback(cb); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + res = detailEnh->SetParameter(param, VIDEO); + auto surface = detailEnh->GetInputSurface(); + auto detailEnh2 = DetailEnhancerVideo::Create(); + auto surface2 = detailEnh2->GetInputSurface(); + res = detailEnh->SetOutputSurface(surface2); + res = detailEnh->Start(); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + auto ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_22, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + auto detailEnh = DetailEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = detailEnh->RegisterCallback(cb); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + res = detailEnh->SetParameter(param, VIDEO); + auto surface = detailEnh->GetInputSurface(); + auto detailEnh2 = DetailEnhancerVideo::Create(); + auto surface2 = detailEnh2->GetInputSurface(); + surface2->SetRequestWidthAndHeight(10, 10); + res = detailEnh->SetOutputSurface(surface2); + res = detailEnh->Start(); + EXPECT_EQ(res, VPE_ALGO_ERR_OK); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + auto ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_23, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + auto detailEnh = DetailEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + auto res = detailEnh->RegisterCallback(cb); + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + res = detailEnh->SetParameter(param, VIDEO); + auto surface = detailEnh->GetInputSurface(); + auto detailEnh2 = DetailEnhancerVideo::Create(); + auto surface2 = detailEnh2->GetInputSurface(); + surface2->SetRequestWidthAndHeight(10, 0); + res = detailEnh->SetOutputSurface(surface2); + res = detailEnh->Start(); + EXPECT_EQ(res, 
VPE_ALGO_ERR_OK); + + int fenceFd = -1; + nativeWindow = CreateNativeWindowFromSurface(&surface); + auto ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/unittest/detail_enhancer_video_ndk/BUILD.gn b/test/unittest/detail_enhancer_video_ndk/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..28b8a50fc9842b56b5afad41de83868363d387eb --- /dev/null +++ b/test/unittest/detail_enhancer_video_ndk/BUILD.gn @@ -0,0 +1,62 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("detail_enhancer_video_ndk_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/extension_manager/include", + "$INTERFACES_INNER_API_DIR", + "$ALGORITHM_DIR/detail_enhancer/include", + "$FRAMEWORK_DIR/capi/detail_enhancer/include", + "$INTERFACES_DIR/kits/c", + "$FRAMEWORK_DIR/capi/video_processing/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/interface/kits/c", + "$ALGORITHM_DIR/detail_enhancer_video/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "//foundation/multimedia/media_foundation/video_processing_engine/interface/kits/c", + ] + + sources = [ "detail_enhancer_video_ndk_unit_test.cpp" ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "//foundation/multimedia/media_foundation/video_processing_engine/framework:video_processing", + "//foundation/multimedia/media_foundation/video_processing_engine/interface/kits/c/video_processing:libvideo_processing_ndk", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "media_foundation:native_media_core", + "graphic_surface:surface", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} diff --git a/test/unittest/detail_enhancer_video_ndk/detail_enhancer_video_ndk_unit_test.cpp b/test/unittest/detail_enhancer_video_ndk/detail_enhancer_video_ndk_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f0da9b65bdf73d1643655a5bfa660fa9a8f42eaf --- /dev/null +++ 
b/test/unittest/detail_enhancer_video_ndk/detail_enhancer_video_ndk_unit_test.cpp @@ -0,0 +1,956 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include + +#include + +#include "native_avformat.h" + +#include "video_processing.h" +#include "video_processing_types.h" +#include "video_processing_callback_impl.h" +#include "video_processing_callback_native.h" +#include "video_environment_native.h" +#include "video_processing_impl.h" + +#include "detail_enhancer_video_impl.h" +#include "detail_enhancer_video.h" +#include "surface/window.h" +#include "external_window.h" + +namespace { +constexpr int64_t NANOS_IN_SECOND = 1000000000L; +constexpr int64_t NANOS_IN_MICRO = 1000L; +constexpr uint32_t DEFAULT_WIDTH = 1920; +constexpr uint32_t DEFAULT_HEIGHT = 1080; + +std::condition_variable g_cvStop{}; +std::mutex g_lock{}; +bool g_isStoped = true; + +void WaitForStop() +{ + std::cout << "wait for stop flag" << std::endl; + std::unique_lock lock(g_lock); + g_cvStop.wait(lock, [] { return g_isStoped; }); + std::cout << "stop flag is " << g_isStoped << std::endl; +} +} + +using namespace std; +using namespace testing::ext; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +void OnError(OH_VideoProcessing *handle, VideoProcessing_ErrorCode errorCode, void* userData) +{ + // do something +} +void OnState(OH_VideoProcessing* instance, VideoProcessing_State state, void* userData) +{ + if (state == VIDEO_PROCESSING_STATE_STOPPED) { + std::cout << "state is VIDEO_PROCESSING_STATE_STOPPED" << std::endl; + { + std::lock_guard lock(g_lock); + g_isStoped = true; + } + g_cvStop.notify_one(); + } else { + std::cout << "state is VIDEO_PROCESSING_STATE_RUNNING" << std::endl; + std::lock_guard lock(g_lock); + g_isStoped = false; + std::cout << "start and stop flag is false" << std::endl; + } + if (userData != nullptr) { + VideoProcessing_State* userState = static_cast(userData); + *userState = state; + } +} + +void OnNewOutputBuffer(OH_VideoProcessing* instance, uint32_t index, void* userData) +{ + (void)userData; + (void)OH_VideoProcessing_RenderOutputBuffer(instance, index); +} + + +class DetailEnhancerVideoNdkUnitTest : public testing::Test { +public: + static void SetUpTestCase(void); + static void TearDownTestCase(void); + void SetUp(); + void TearDown(); + sptr surface; + uint32_t FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer, OHNativeWindow *window); +}; + +void DetailEnhancerVideoNdkUnitTest::SetUpTestCase(void) +{ + cout << "[SetUpTestCase]: " << endl; +} + +void DetailEnhancerVideoNdkUnitTest::TearDownTestCase(void) +{ + cout << "[TearDownTestCase]: " << endl; +} + +void DetailEnhancerVideoNdkUnitTest::SetUp(void) +{ + cout << "[SetUp]: SetUp!!!" << endl; +} + +void DetailEnhancerVideoNdkUnitTest::TearDown(void) +{ + cout << "[TearDown]: over!!!" 
<< endl; +} + +int64_t GetSystemTime() +{ + struct timespec now; + (void)clock_gettime(CLOCK_BOOTTIME, &now); + int64_t nanoTime = reinterpret_cast(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec; + + return nanoTime / NANOS_IN_MICRO; +} + +uint32_t DetailEnhancerVideoNdkUnitTest::FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer, OHNativeWindow *window) +{ + struct Region region; + struct Region::Rect *rect = new Region::Rect(); + rect->x = 0; + rect->y = 0; + rect->w = DEFAULT_WIDTH; + rect->h = DEFAULT_HEIGHT; + region.rects = rect; + NativeWindowHandleOpt(window, SET_UI_TIMESTAMP, GetSystemTime()); + int32_t err = OH_NativeWindow_NativeWindowFlushBuffer(window, ohNativeWindowBuffer, -1, region); + delete rect; + if (err != 0) { + cout << "FlushBuffer failed" << endl; + return 1; + } + return 0; +} + +// context init nullptr +HWTEST_F(DetailEnhancerVideoNdkUnitTest, create_instance_01, TestSize.Level1) +{ + int createType = 0x4; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +// context create without init +HWTEST_F(DetailEnhancerVideoNdkUnitTest, create_instance_02, TestSize.Level1) +{ + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + auto ret = OH_VideoProcessing_Create(&instance, createType); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(instance); +} + +// context init 2 +HWTEST_F(DetailEnhancerVideoNdkUnitTest, create_instance_03, TestSize.Level1) +{ + int createType = 0x2; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +// context init 1 +HWTEST_F(DetailEnhancerVideoNdkUnitTest, create_instance_04, TestSize.Level1) +{ + int createType = 0x1; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +// context init -1 +HWTEST_F(DetailEnhancerVideoNdkUnitTest, create_instance_05, TestSize.Level1) +{ + int createType = -1; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(instance); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +// context init repeat +HWTEST_F(DetailEnhancerVideoNdkUnitTest, create_instance_06, TestSize.Level1) +{ + int createType = 0x4; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + ret = 
OH_VideoProcessing_InitializeEnvironment(); + ret = OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + ret = OH_VideoProcessing_InitializeEnvironment(); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +// deinit +HWTEST_F(DetailEnhancerVideoNdkUnitTest, deinit_01, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +// context init and destroy +HWTEST_F(DetailEnhancerVideoNdkUnitTest, destroy_instance_01, TestSize.Level1) +{ + int createType = 0x4; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + ret = OH_VideoProcessing_Destroy(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// context no create but destroy +HWTEST_F(DetailEnhancerVideoNdkUnitTest, destroy_instance_02, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Destroy(instance); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// context no create but destroy without initialize +HWTEST_F(DetailEnhancerVideoNdkUnitTest, destroy_instance_03, TestSize.Level1) +{ + OH_VideoProcessing* instance = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_Destroy(instance); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); +} + +// color space converter support check +HWTEST_F(DetailEnhancerVideoNdkUnitTest, check_support_01, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + const VideoProcessing_ColorSpaceInfo* sourceVideoInfo = nullptr; + const VideoProcessing_ColorSpaceInfo* destinationVideoInfo = nullptr; + auto res = OH_VideoProcessing_IsColorSpaceConversionSupported(sourceVideoInfo, destinationVideoInfo); + EXPECT_EQ(res, false); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +// Set param nullptr +HWTEST_F(DetailEnhancerVideoNdkUnitTest, set_parameter_01, TestSize.Level1) +{ + int createType = 0x4; + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + OH_AVFormat* parameter = nullptr; + ret = OH_VideoProcessing_SetParameter(instance, parameter); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// set normal +HWTEST_F(DetailEnhancerVideoNdkUnitTest, set_parameter_02, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + int 
createType = 0x4; + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + ret = OH_VideoProcessing_SetParameter(instance, parameter); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// get parameter but param not null +HWTEST_F(DetailEnhancerVideoNdkUnitTest, get_parameter_01, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + OH_AVFormat* parameter = OH_AVFormat_Create(); + ret = OH_VideoProcessing_GetParameter(instance, parameter); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// get parameter normal +HWTEST_F(DetailEnhancerVideoNdkUnitTest, get_parameter_02, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + OH_AVFormat* parameter = nullptr; + ret = OH_VideoProcessing_GetParameter(instance, parameter); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// get parameter normal after set +HWTEST_F(DetailEnhancerVideoNdkUnitTest, get_parameter_03, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + OH_AVFormat* parameterSetted = OH_AVFormat_Create(); + OH_AVFormat* parameterGetted = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameterSetted, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + ret = OH_VideoProcessing_SetParameter(instance, parameterSetted); + ret = OH_VideoProcessing_GetParameter(instance, parameterGetted); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// set surface with nullptr +HWTEST_F(DetailEnhancerVideoNdkUnitTest, set_surface_01, TestSize.Level1) +{ + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + OHNativeWindow* window; + OHNativeWindow* window2; + OH_VideoProcessing_Create(&instance, createType); + OH_VideoProcessing_Create(&instance2, createType); + OH_VideoProcessing_GetSurface(instance, &window); + OH_VideoProcessing_GetSurface(instance2, &window2); + auto ret = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// set surface with a non nullptr window +HWTEST_F(DetailEnhancerVideoNdkUnitTest, set_surface_02, TestSize.Level1) +{ + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OHNativeWindow* window = nullptr; + OH_VideoProcessing_Create(&instance, createType); + auto ret = OH_VideoProcessing_SetSurface(instance, window); 
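+    // window is still nullptr here, so SetSurface is expected to be rejected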
+ EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// get surface nullptr +HWTEST_F(DetailEnhancerVideoNdkUnitTest, get_surface_01, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OHNativeWindow* window; + ret = OH_VideoProcessing_Create(&instance, createType); + ret = OH_VideoProcessing_GetSurface(instance, &window); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// set surfaceto a non nullptr window +HWTEST_F(DetailEnhancerVideoNdkUnitTest, get_surface_02, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + auto detailEnhVideo = DetailEnhancerVideo::Create(); + auto surface = detailEnhVideo->GetInputSurface(); + OHNativeWindow* window = CreateNativeWindowFromSurface(&surface); + ret = OH_VideoProcessing_Create(&instance, createType); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + ret = OH_VideoProcessing_SetParameter(instance, parameter); + ret = OH_VideoProcessing_SetSurface(instance, window); + ret = OH_VideoProcessing_GetSurface(instance, &window); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// create callback and register +HWTEST_F(DetailEnhancerVideoNdkUnitTest, callback_01, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing_Create(&instance, createType); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + OH_VideoProcessing_SetParameter(instance, parameter); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError(callback, OnError); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, OnState); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// create callback and register null func +HWTEST_F(DetailEnhancerVideoNdkUnitTest, callback_02, TestSize.Level1) +{ + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing_Create(&instance, createType); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + OH_VideoProcessing_SetParameter(instance, parameter); + VideoProcessing_Callback* callback = nullptr; + 
OH_VideoProcessingCallback_Create(&callback); + OH_VideoProcessingCallback_BindOnError(callback, nullptr); + OH_VideoProcessingCallback_BindOnState(callback, nullptr); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, nullptr); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + auto ret = OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); + OH_VideoProcessingCallback_Destroy(callback); +} + +// create and destroy +HWTEST_F(DetailEnhancerVideoNdkUnitTest, callback_03, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + ret = OH_VideoProcessing_SetParameter(instance, parameter); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + ret = OH_VideoProcessingCallback_BindOnError(callback, OnError); + ret = OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + void* userData = nullptr; + ret = OH_VideoProcessing_RegisterCallback(instance, callback, userData); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + ret = OH_VideoProcessingCallback_Destroy(callback); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// only destroy call back +HWTEST_F(DetailEnhancerVideoNdkUnitTest, callback_04, TestSize.Level1) +{ + VideoProcessing_Callback* callback = nullptr; + VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Destroy(callback); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); +} + +// create callback and register but instance is nullptr +HWTEST_F(DetailEnhancerVideoNdkUnitTest, callback_05, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + OH_VideoProcessing* instance = nullptr; + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError(callback, OnError); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, OnState); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// create callback and register but callback is nullptr +HWTEST_F(DetailEnhancerVideoNdkUnitTest, callback_06, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_BindOnError(callback, 
OnError); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, OnState); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + EXPECT_NE(ret, VIDEO_PROCESSING_SUCCESS); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// start processing with flush surface +HWTEST_F(DetailEnhancerVideoNdkUnitTest, start_01, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + OH_VideoProcessing_Create(&instance, createType); + OH_VideoProcessing_Create(&instance2, createType); + VideoProcessing_Callback* callback = nullptr; + OH_VideoProcessingCallback_Create(&callback); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + OH_VideoProcessing_SetParameter(instance, parameter); + + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + + VideoProcessing_ErrorCode res = OH_VideoProcessing_GetSurface(instance, &window); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_GetSurface(instance2, &window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + + res = OH_VideoProcessing_Start(instance); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + int fenceFd = -1; + auto ret1 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret1, VPE_ALGO_ERR_OK); + auto ret2 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret2, VPE_ALGO_ERR_OK); + auto ret3 = OH_NativeWindow_NativeWindowRequestBuffer(window, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret3, VPE_ALGO_ERR_OK); + auto ret4 = FlushSurf(ohNativeWindowBuffer, window); + ASSERT_EQ(ret4, VPE_ALGO_ERR_OK); + OH_VideoProcessing_RenderOutputBuffer(instance, 1); +} + +// start processing with flush surface then stop +HWTEST_F(DetailEnhancerVideoNdkUnitTest, start_02, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + OH_VideoProcessing_Create(&instance, createType); + OH_VideoProcessing_Create(&instance2, createType); + VideoProcessing_Callback* callback = nullptr; + OH_VideoProcessingCallback_Create(&callback); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + auto userData = 
VIDEO_PROCESSING_STATE_STOPPED; + OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + OH_VideoProcessing_SetParameter(instance, parameter); + + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + + VideoProcessing_ErrorCode res = OH_VideoProcessing_GetSurface(instance, &window); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_GetSurface(instance2, &window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + + res = OH_VideoProcessing_Start(instance); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + + int fenceFd = -1; + auto ret1 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret1, VPE_ALGO_ERR_OK); + auto ret2 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret2, VPE_ALGO_ERR_OK); + auto ret3 = OH_NativeWindow_NativeWindowRequestBuffer(window, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret3, VPE_ALGO_ERR_OK); + auto ret4 = FlushSurf(ohNativeWindowBuffer, window); + ASSERT_EQ(ret4, VPE_ALGO_ERR_OK); + + OH_VideoProcessing_Stop(instance); + WaitForStop(); + OH_VideoProcessingCallback_Destroy(callback); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// start repeatedly +HWTEST_F(DetailEnhancerVideoNdkUnitTest, start_03, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + OH_VideoProcessing_Create(&instance, createType); + OH_VideoProcessing_Create(&instance2, createType); + VideoProcessing_Callback* callback = nullptr; + OH_VideoProcessingCallback_Create(&callback); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + void* userData = nullptr; + OH_VideoProcessing_RegisterCallback(instance, callback, userData); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + OH_VideoProcessing_SetParameter(instance, parameter); + + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + + VideoProcessing_ErrorCode res = OH_VideoProcessing_GetSurface(instance, &window); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_GetSurface(instance2, &window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + + res = OH_VideoProcessing_Start(instance); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_Start(instance); + EXPECT_NE(res, VIDEO_PROCESSING_SUCCESS); + + int fenceFd = -1; + auto ret1 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret1, VPE_ALGO_ERR_OK); + auto ret2 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret2, 
VPE_ALGO_ERR_OK); + auto ret3 = OH_NativeWindow_NativeWindowRequestBuffer(window, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret3, VPE_ALGO_ERR_OK); + auto ret4 = FlushSurf(ohNativeWindowBuffer, window); + ASSERT_EQ(ret4, VPE_ALGO_ERR_OK); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + + OH_VideoProcessing_Stop(instance); + WaitForStop(); + OH_VideoProcessing_Stop(instance); + OH_VideoProcessingCallback_Destroy(callback); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// start but not have buffer callbacks +HWTEST_F(DetailEnhancerVideoNdkUnitTest, start_04, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + OH_VideoProcessing_Create(&instance, createType); + OH_VideoProcessing_Create(&instance2, createType); + VideoProcessing_Callback* callback = nullptr; + OH_VideoProcessingCallback_Create(&callback); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, nullptr); + void* userData = nullptr; + OH_VideoProcessing_RegisterCallback(instance, callback, userData); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + OH_VideoProcessing_SetParameter(instance, parameter); + + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + + VideoProcessing_ErrorCode res = OH_VideoProcessing_GetSurface(instance, &window); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_GetSurface(instance2, &window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + + res = OH_VideoProcessing_Start(instance); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + + int fenceFd = -1; + auto ret1 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret1, VPE_ALGO_ERR_OK); + auto ret2 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret2, VPE_ALGO_ERR_OK); + auto ret3 = OH_NativeWindow_NativeWindowRequestBuffer(window, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret3, VPE_ALGO_ERR_OK); + auto ret4 = FlushSurf(ohNativeWindowBuffer, window); + ASSERT_EQ(ret4, VPE_ALGO_ERR_OK); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + + OH_VideoProcessing_Stop(instance); + WaitForStop(); + OH_VideoProcessingCallback_Destroy(callback); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// start and destroy while still running +HWTEST_F(DetailEnhancerVideoNdkUnitTest, start_05, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + OH_VideoProcessing_Create(&instance, createType); + OH_VideoProcessing_Create(&instance2, createType); + VideoProcessing_Callback* callback = nullptr; + OH_VideoProcessingCallback_Create(&callback); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + 
OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + void* userData = nullptr; + OH_VideoProcessing_RegisterCallback(instance, callback, userData); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + OH_VideoProcessing_SetParameter(instance, parameter); + + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + + VideoProcessing_ErrorCode res = OH_VideoProcessing_GetSurface(instance, &window); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_GetSurface(instance2, &window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + + res = OH_VideoProcessing_Start(instance); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + + int fenceFd = -1; + auto ret1 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret1, VPE_ALGO_ERR_OK); + auto ret2 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret2, VPE_ALGO_ERR_OK); + auto ret3 = OH_NativeWindow_NativeWindowRequestBuffer(window, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret3, VPE_ALGO_ERR_OK); + auto ret4 = FlushSurf(ohNativeWindowBuffer, window); + ASSERT_EQ(ret4, VPE_ALGO_ERR_OK); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + res = OH_VideoProcessingCallback_Destroy(callback); + EXPECT_NE(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessingCallback_Destroy(nullptr); + EXPECT_NE(res, VIDEO_PROCESSING_SUCCESS); + + OH_VideoProcessing_Stop(instance); + WaitForStop(); + OH_VideoProcessingCallback_Destroy(callback); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// start but no callbacks binded +HWTEST_F(DetailEnhancerVideoNdkUnitTest, start_06, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + OH_VideoProcessing_Create(&instance, createType); + OH_VideoProcessing_Create(&instance2, createType); + VideoProcessing_Callback* callback = nullptr; + OH_VideoProcessingCallback_Create(&callback); + void* userData = nullptr; + OH_VideoProcessing_RegisterCallback(instance, callback, userData); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + OH_VideoProcessing_SetParameter(instance, parameter); + + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + + VideoProcessing_ErrorCode res = OH_VideoProcessing_GetSurface(instance, &window); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_GetSurface(instance2, &window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + + res = OH_VideoProcessing_Start(instance); + EXPECT_NE(res, VIDEO_PROCESSING_SUCCESS); + + int fenceFd = -1; + auto ret1 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret1, VPE_ALGO_ERR_OK); + auto ret2 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret2, VPE_ALGO_ERR_OK); 
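+    // Request a producer buffer and flush it so one frame goes through before teardown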
+ auto ret3 = OH_NativeWindow_NativeWindowRequestBuffer(window, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret3, VPE_ALGO_ERR_OK); + auto ret4 = FlushSurf(ohNativeWindowBuffer, window); + ASSERT_EQ(ret4, VPE_ALGO_ERR_OK); + OH_VideoProcessing_RenderOutputBuffer(instance, 1); + + OH_VideoProcessing_Stop(instance); + WaitForStop(); + OH_VideoProcessingCallback_Destroy(callback); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// start processing with flush surface then stop and start again +HWTEST_F(DetailEnhancerVideoNdkUnitTest, start_07, TestSize.Level1) +{ + OHNativeWindowBuffer *ohNativeWindowBuffer; + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + OH_VideoProcessing_Create(&instance, createType); + OH_VideoProcessing_Create(&instance2, createType); + VideoProcessing_Callback* callback = nullptr; + OH_VideoProcessingCallback_Create(&callback); + OH_VideoProcessingCallback_BindOnError(callback, OnError); + OH_VideoProcessingCallback_BindOnState(callback, OnState); + OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + OH_VideoProcessing_SetParameter(instance, parameter); + + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + + VideoProcessing_ErrorCode res = OH_VideoProcessing_GetSurface(instance, &window); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_GetSurface(instance2, &window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + + res = OH_VideoProcessing_Start(instance); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + + int fenceFd = -1; + auto ret1 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret1, VPE_ALGO_ERR_OK); + auto ret2 = OH_NativeWindow_NativeWindowHandleOpt(window, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret2, VPE_ALGO_ERR_OK); + auto ret3 = OH_NativeWindow_NativeWindowRequestBuffer(window, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret3, VPE_ALGO_ERR_OK); + auto ret4 = FlushSurf(ohNativeWindowBuffer, window); + ASSERT_EQ(ret4, VPE_ALGO_ERR_OK); + + OH_VideoProcessing_Stop(instance); + WaitForStop(); + res = OH_VideoProcessing_Start(instance); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + FlushSurf(ohNativeWindowBuffer, window); + OH_VideoProcessing_Stop(instance); + WaitForStop(); + OH_VideoProcessingCallback_Destroy(callback); + OH_VideoProcessing_Destroy(instance); + OH_VideoProcessing_DeinitializeEnvironment(); +} + +// simple output buffer check +HWTEST_F(DetailEnhancerVideoNdkUnitTest, output_buffer_1, TestSize.Level1) +{ + OH_VideoProcessing_InitializeEnvironment(); + int createType = 0x4; + OH_VideoProcessing* instance = nullptr; + auto res = OH_VideoProcessing_Create(&instance, createType); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + OH_VideoProcessing_RenderOutputBuffer(instance, 0); + res = OH_VideoProcessing_Destroy(instance); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); + res = 
OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(res, VIDEO_PROCESSING_SUCCESS); +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/unittest/image_processing/BUILD.gn b/test/unittest/image_processing/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..d49db4729c47a6ca3ba07da49ad01d127a220da6 --- /dev/null +++ b/test/unittest/image_processing/BUILD.gn @@ -0,0 +1,62 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("image_processing_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/DetailEnhancer/sample", + "$ALGORITHM_DIR/detail_enhancer/include", + "$FRAMEWORK_DIR/capi/image_processing/detail_enhancer/include", + "$INTERFACES_DIR/kits/c", + "$FRAMEWORK_DIR/capi/image_processing/include", + "//foundation/multimedia/media_foundation/interface/kits/c", + "//foundation/multimedia/image_framework/interfaces/kits/native/include/image" + ] + + sources = [ "image_processing_unit_test.cpp" ] + + deps = [ + + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "media_foundation:native_media_core", + "media_foundation:image_processing", + "image_framework:pixelmap", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/unittest/image_processing/image_processing_unit_test.cpp b/test/unittest/image_processing/image_processing_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0691a0333d07552ac38c3bc0f367af642a5490b5 --- /dev/null +++ b/test/unittest/image_processing/image_processing_unit_test.cpp @@ -0,0 +1,369 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "image_processing.h" +#include "image_processing_types.h" +#include "native_avformat.h" +#include "pixelmap_native.h" +#include "image_processing_factory.h" + +using namespace std; +using namespace testing::ext; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +class ImageProcessingUnitTest : public testing::Test { +public: + static void SetUpTestCase(void); + static void TearDownTestCase(void); + void SetUp(); + void TearDown(); +}; + +void ImageProcessingUnitTest::SetUpTestCase(void) +{ + cout << "[SetUpTestCase]: " << endl; +} + +void ImageProcessingUnitTest::TearDownTestCase(void) +{ + cout << "[TearDownTestCase]: " << endl; +} + +void ImageProcessingUnitTest::SetUp(void) +{ + cout << "[SetUp]: SetUp!!!" << endl; +} + +void ImageProcessingUnitTest::TearDown(void) +{ + cout << "[TearDown]: over!!!" << endl; +} + +void CreateEmptyPixelmap(OH_PixelmapNative** pixelMap, int32_t width, int32_t height, int format) +{ + OH_Pixelmap_InitializationOptions* options = nullptr; + (void)OH_PixelmapInitializationOptions_Create(&options); + (void)OH_PixelmapInitializationOptions_SetWidth(options, width); + (void)OH_PixelmapInitializationOptions_SetHeight(options, height); + (void)OH_PixelmapInitializationOptions_SetPixelFormat(options, format); + (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelMap); +} + +HWTEST_F(ImageProcessingUnitTest, create_instance_01, TestSize.Level1) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* instance = nullptr; + ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, deinitialize_02, TestSize.Level1) +{ + auto ret = OH_ImageProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, create_instance_02, TestSize.Level1) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* instance = nullptr; + ret = OH_ImageProcessing_Create(&instance, 11); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, destroy_instance_01, TestSize.Level1) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* instance = nullptr; + ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + ret = OH_ImageProcessing_Destroy(instance); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, destroy_instance_02, TestSize.Level1) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* instance = nullptr; + ret = OH_ImageProcessing_Destroy(instance); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +/** + * @tc.number : VPE_IMAGE_API_TEST_0140 + * @tc.name : call OH_ImageProcessing_IsCompositionSupported + * @tc.desc : function test + */ +HWTEST(VpeImageApiTest, VPE_IMAGE_API_TEST_0140, TestSize.Level0) +{ + ImageProcessing_ColorSpaceInfo SRC_INFO; + ImageProcessing_ColorSpaceInfo SRC_GAIN_INFO; + ImageProcessing_ColorSpaceInfo DST_INFO; + DST_INFO.colorSpace = 9; + DST_INFO.metadataType = 3; + DST_INFO.pixelFormat = 10; + SRC_INFO.colorSpace = 3; + SRC_INFO.metadataType = 1; + SRC_INFO.pixelFormat = 3; + bool ret = OH_ImageProcessing_IsCompositionSupported(&SRC_INFO, &SRC_GAIN_INFO, &DST_INFO); + ASSERT_TRUE(ret); +} +HWTEST(VpeImageApiTest, VPE_IMAGE_API_TEST_0141, TestSize.Level0) +{ + 
ImageProcessing_ColorSpaceInfo SRC_INFO; + ImageProcessing_ColorSpaceInfo SRC_GAIN_INFO; + ImageProcessing_ColorSpaceInfo DST_INFO; + SRC_INFO.colorSpace = 9; + SRC_INFO.metadataType = 3; + SRC_INFO.pixelFormat = 10; + DST_INFO.colorSpace = 3; + DST_INFO.metadataType = 1; + DST_INFO.pixelFormat = 3; + bool ret = OH_ImageProcessing_IsDecompositionSupported(&SRC_INFO, &DST_INFO, &SRC_GAIN_INFO); + ASSERT_TRUE(ret); +} +HWTEST(VpeImageApiTest, VPE_IMAGE_API_TEST_0142, TestSize.Level0) +{ + ImageProcessing_ColorSpaceInfo SRC_INFO; + ImageProcessing_ColorSpaceInfo DST_INFO; + SRC_INFO.colorSpace = 3; + SRC_INFO.metadataType = 0; + SRC_INFO.pixelFormat = 3; + DST_INFO.colorSpace = 4; + DST_INFO.metadataType = 0; + DST_INFO.pixelFormat = 3; + bool ret = OH_ImageProcessing_IsColorSpaceConversionSupported(&SRC_INFO, &DST_INFO); + ASSERT_TRUE(ret); +} +HWTEST(VpeImageApiTest, VPE_IMAGE_API_TEST_0143, TestSize.Level0) +{ + ImageProcessing_ColorSpaceInfo SRC_INFO; + SRC_INFO.colorSpace = 9; + SRC_INFO.metadataType = 3; + SRC_INFO.pixelFormat = 10; + bool ret = OH_ImageProcessing_IsMetadataGenerationSupported(&SRC_INFO); + ASSERT_TRUE(ret); +} +HWTEST_F(ImageProcessingUnitTest, set_parameter_01, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + OH_AVFormat* parameter = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_SetParameter(instance, parameter); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, set_parameter_02, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + OH_AVFormat* parameter = nullptr; + ret = OH_ImageProcessing_SetParameter(instance, parameter); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); +} + +HWTEST_F(ImageProcessingUnitTest, set_parameter_03, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, 10); + ret = OH_ImageProcessing_SetParameter(instance, parameter); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, get_parameter_01, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + OH_AVFormat* parameter = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_GetParameter(instance, parameter); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, get_parameter_02, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + OH_AVFormat* parameter = nullptr; + ret = OH_ImageProcessing_GetParameter(instance, parameter); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_INVALID_PARAMETER); +} + +HWTEST_F(ImageProcessingUnitTest, get_parameter_03, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + ret = OH_ImageProcessing_SetParameter(instance, parameter); + ret = OH_ImageProcessing_GetParameter(instance, parameter); + EXPECT_EQ(ret, 
IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, get_parameter_04, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + OH_AVFormat* parameter = OH_AVFormat_Create(); + ret = OH_ImageProcessing_GetParameter(instance, parameter); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, process_01, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + OH_PixelmapNative* srcImg = nullptr; + OH_PixelmapNative* dstImg = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_EnhanceDetail(instance, srcImg, dstImg); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, process_02, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + OH_PixelmapNative* srcImg = nullptr; + OH_PixelmapNative* dstImg = nullptr; + CreateEmptyPixelmap(&srcImg, 720, 960, 4); + CreateEmptyPixelmap(&dstImg, 2880, 3840, 4); + ImageProcessing_ErrorCode ret = OH_ImageProcessing_EnhanceDetail(instance, srcImg, dstImg); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, process_03, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + OH_AVFormat* parameter = OH_AVFormat_Create(); + OH_AVFormat_SetIntValue(parameter, IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, + IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH); + ret = OH_ImageProcessing_SetParameter(instance, parameter); + OH_PixelmapNative* srcImg = nullptr; + OH_PixelmapNative* dstImg = nullptr; + CreateEmptyPixelmap(&srcImg, 720, 960, 4); + CreateEmptyPixelmap(&dstImg, 2880, 3840, 4); + ret = OH_ImageProcessing_EnhanceDetail(instance, srcImg, dstImg); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, process_04, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + OH_PixelmapNative* srcImg = nullptr; + OH_PixelmapNative* dstImg = nullptr; + CreateEmptyPixelmap(&srcImg, 720, 960, 4); + CreateEmptyPixelmap(&dstImg, 2880, 3840, 4); + ret = OH_ImageProcessing_EnhanceDetail(instance, srcImg, dstImg); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, process_05, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + OH_PixelmapNative* srcImg = nullptr; + OH_PixelmapNative* dstImg = nullptr; + CreateEmptyPixelmap(&srcImg, 720, 960, 9); + CreateEmptyPixelmap(&dstImg, 2880, 3840, 9); + ret = OH_ImageProcessing_EnhanceDetail(instance, srcImg, dstImg); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, initialize, TestSize.Level1) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + EXPECT_EQ(ret, IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); +} + +HWTEST_F(ImageProcessingUnitTest, deinitialize, TestSize.Level1) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + ret = OH_ImageProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, process_07, TestSize.Level1) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + 
OH_ImageProcessing* instance = nullptr; + ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_COMPOSITION); + OH_PixelmapNative* srcImg = nullptr; + OH_PixelmapNative* dstImg = nullptr; + OH_PixelmapNative* gainmap = nullptr; + CreateEmptyPixelmap(&srcImg, 3840, 2160, 3); + CreateEmptyPixelmap(&dstImg, 3840, 2160, 10); + CreateEmptyPixelmap(&gainmap, 1920, 1080, 3); + if (dstImg == nullptr) { + return; + } + ret = OH_ImageProcessing_Compose(instance, srcImg, gainmap, dstImg); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_ImageProcessing_DeinitializeEnvironment(); +} +HWTEST_F(ImageProcessingUnitTest, process_08, TestSize.Level1) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DECOMPOSITION); + OH_PixelmapNative* srcImg = nullptr; + OH_PixelmapNative* dstImg = nullptr; + OH_PixelmapNative* gainmap = nullptr; + CreateEmptyPixelmap(&srcImg, 3840, 2160, 10); + CreateEmptyPixelmap(&dstImg, 3840, 2160, 3); + CreateEmptyPixelmap(&gainmap, 1920, 1080, 3); + if (srcImg == nullptr) { + return; + } + ret = OH_ImageProcessing_Decompose(instance, srcImg, dstImg, gainmap); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_ImageProcessing_DeinitializeEnvironment(); +} +HWTEST_F(ImageProcessingUnitTest, process_09, TestSize.Level1) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION); + OH_PixelmapNative* srcImg = nullptr; + OH_PixelmapNative* dstImg = nullptr; + CreateEmptyPixelmap(&srcImg, 3840, 2160, 3); + CreateEmptyPixelmap(&dstImg, 3840, 2160, 3); + ret = OH_ImageProcessing_ConvertColorSpace(instance, srcImg, dstImg); + if (ret != 0) { + return; + } + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_ImageProcessing_DeinitializeEnvironment(); +} +HWTEST_F(ImageProcessingUnitTest, process_10, TestSize.Level1) +{ + OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_METADATA_GENERATION); + OH_PixelmapNative* srcImg = nullptr; + CreateEmptyPixelmap(&srcImg, 3840, 2160, 10); + if (srcImg == nullptr) { + return; + } + ret = OH_ImageProcessing_GenerateMetadata(instance, srcImg); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); + OH_ImageProcessing_DeinitializeEnvironment(); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/unittest/metadata_gen_video_ndk/BUILD.gn b/test/unittest/metadata_gen_video_ndk/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..17069ac797f2eca2e063c988a153cff4b5e5238a --- /dev/null +++ b/test/unittest/metadata_gen_video_ndk/BUILD.gn @@ -0,0 +1,77 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("metadata_gen_video_ndk_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$ALGORITHM_DIR/common/include", + "$ALGORITHM_DIR/extension_manager/include", + "$INTERFACES_INNER_API_DIR", + "$TEST_UTILS_PATH/ColorSpaceConverter/sample", + "$ALGORITHM_DIR/colorspace_converter/include", + "$FRAMEWORK_DIR/capi/video_processing/include", + "$FRAMEWORK_DIR/capi/video_processing/colorspace_converter/include/", + "$ALGORITHM_DIR/colorspace_converter_video/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", + "$TEST_UTILS_PATH/ColorSpaceConverter/sample/video", + "//foundation/multimedia/media_foundation/interface/kits/c/", + "//foundation/multimedia/media_foundation/video_processing_engine/interface/kits/c/", + "//foundation/multimedia/media_foundation/video_processing_engine/", + "//foundation/multimedia/media_foundation/video_processing_engine/framework/dfx/include/", + "//foundation/multimedia/media_foundation/video_processing_engine/framework/capi/video_processing/include/", + "//foundation/multimedia/media_foundation/video_processing_engine/interface/inner_api/", + "//foundation/multimedia/video_processing_engine/interfaces/inner_api/", + "//foundation/multimedia/video_processing_engine/framework/algorithm/colorspace_converter_video/include/", + "//foundation/graphic/graphic_2d/interfaces/inner_api/", + "//foundation/graphic/graphic_2d_ext/ohcore/graphic_compat_layer/include/utils/", + ] + + sources = [ "metadata_gen_video_ndk_unit_test.cpp" ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine", + "$FRAMEWORK_DIR:video_processing_capi_impl", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "graphic_surface:surface", + "graphic_surface:sync_fence", + "graphic_2d:libgraphic_utils", + "graphic_2d:librender_service_client", + "hilog:libhilog", + "hitrace:hitrace_meter", + "media_foundation:native_media_core", + "media_foundation:video_processing", + "ipc:ipc_core", + "memory_utils:libdmabufheap", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} diff --git a/test/unittest/metadata_gen_video_ndk/metadata_gen_video_ndk_unit_test.cpp b/test/unittest/metadata_gen_video_ndk/metadata_gen_video_ndk_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3f9c58c4cf1609ce751b9cac05f9b86fcf67c6bf --- /dev/null +++ b/test/unittest/metadata_gen_video_ndk/metadata_gen_video_ndk_unit_test.cpp @@ -0,0 +1,284 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include <cstdint>
+#include <cstdio>
+#include <vector>
+#include <gtest/gtest.h>
+#include "securec.h"
+#include <unistd.h>
+
+#include "native_avformat.h"
+#include "native_window.h"
+#include "surface/window.h"
+#include "external_window.h"
+#include "v1_0/cm_color_space.h"
+
+#include "video_processing.h"
+#include "video_processing_types.h"
+#include "metadata_generator_video.h"
+#include "algorithm_common.h"
+
+constexpr uint32_t DEFAULT_WIDTH = 1920;
+constexpr uint32_t DEFAULT_HEIGHT = 1080;
+constexpr uint32_t DEFAULT_BYTE = 32;
+using namespace testing::ext;
+using namespace OHOS;
+using namespace OHOS::Media;
+using namespace std;
+using namespace OHOS::Media::VideoProcessingEngine;
+using namespace OHOS::HDI::Display::Graphic::Common::V1_0;
+namespace {
+
+class MetadataGeneratorVideoNdkImplUnitTest : public testing::Test {
+public:
+    static void SetUpTestCase(void) {};
+    static void TearDownTestCase(void) {};
+    void SetUp()
+    {
+        requestCfg_.usage =
+            BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER | BUFFER_USAGE_HW_TEXTURE;
+        requestCfg_.width = DEFAULT_WIDTH;
+        requestCfg_.height = DEFAULT_HEIGHT;
+        requestCfg_.timeout = 0;
+        requestCfg_.strideAlignment = DEFAULT_BYTE;
+        flushCfg_.damage.x = 0;
+        flushCfg_.damage.y = 0;
+        flushCfg_.damage.w = DEFAULT_WIDTH;
+        flushCfg_.damage.h = DEFAULT_HEIGHT;
+        flushCfg_.timestamp = 0;
+    };
+    void TearDown()
+    {
+    };
+
+    BufferFlushConfig flushCfg_{};
+    BufferRequestConfig requestCfg_{};
+    int32_t fence_ = -1;
+    CM_ColorSpaceType inColspc_ = CM_BT2020_PQ_LIMIT;
+    CM_HDR_Metadata_Type inMetaType_ = CM_VIDEO_HDR_VIVID;
+    GraphicPixelFormat surfacePixelFmt_ = GRAPHIC_PIXEL_FMT_YCBCR_P010;
+
+    void SetMeatadata(sptr<SurfaceBuffer> &buffer, uint32_t value);
+    void SetMeatadata(sptr<SurfaceBuffer> &buffer, CM_ColorSpaceInfo &colorspaceInfo);
+    VideoProcessing_ErrorCode Process();
+};
+
+void OnError(OH_VideoProcessing *handle, VideoProcessing_ErrorCode errorCode, void* userData)
+{
+    (void)handle;
+    (void)errorCode;
+    (void)userData;
+}
+void OnState(OH_VideoProcessing *handle, VideoProcessing_State state, void* userData)
+{
+    (void)handle;
+    (void)state;
+    (void)userData;
+}
+void OnNewOutputBuffer(OH_VideoProcessing *handle, uint32_t index, void* userData)
+{
+    (void)handle;
+    (void)index;
+    (void)userData;
+}
+
+void MetadataGeneratorVideoNdkImplUnitTest::SetMeatadata(sptr<SurfaceBuffer> &buffer, uint32_t value)
+{
+    std::vector<uint8_t> metadata;
+    metadata.resize(sizeof(value));
+    (void)memcpy_s(metadata.data(), metadata.size(), &value, sizeof(value));
+    uint32_t err = buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, metadata);
+    if (err != 0) {
+        printf("Buffer set metadata info, ret: %d\n", err);
+    }
+}
+
+void MetadataGeneratorVideoNdkImplUnitTest::SetMeatadata(sptr<SurfaceBuffer> &buffer,
+    CM_ColorSpaceInfo &colorspaceInfo)
+{
+    std::vector<uint8_t> metadata;
+    metadata.resize(sizeof(CM_ColorSpaceInfo));
+    (void)memcpy_s(metadata.data(), metadata.size(), &colorspaceInfo, sizeof(CM_ColorSpaceInfo));
+    uint32_t err = buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, metadata);
+    if (err != 0) {
+        printf("Buffer set colorspace info, ret: %d\n", err);
+    }
+}
+
+VideoProcessing_ErrorCode MetadataGeneratorVideoNdkImplUnitTest::Process()
+{
+    VideoProcessing_ErrorCode ret = VIDEO_PROCESSING_SUCCESS;
+    sptr<SurfaceBuffer> buffer;
+    CM_ColorSpaceInfo inColspcInfo = {
+        static_cast<CM_ColorPrimaries>((inColspc_ & COLORPRIMARIES_MASK) >> COLORPRIMARIES_OFFSET),
+        static_cast<CM_TransFunc>((inColspc_ & TRANSFUNC_MASK) >> TRANSFUNC_OFFSET),
+        static_cast<CM_Matrix>((inColspc_ & MATRIX_MASK) >> MATRIX_OFFSET),
+        static_cast<CM_Range>((inColspc_ & RANGE_MASK) >> RANGE_OFFSET)
+    };
+    ret = OH_VideoProcessing_InitializeEnvironment();
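+    // Wire two instances together (instance renders into instance2's input surface),
+    // then push a single buffer tagged with the colorspace/metadata under test.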
+    int createType = 0x2;
+    OH_VideoProcessing* instance = nullptr;
+    OH_VideoProcessing* instance2 = nullptr;
+    ret = OH_VideoProcessing_Create(&instance, createType);
+    ret = OH_VideoProcessing_Create(&instance2, createType);
+    VideoProcessing_Callback* callback = nullptr;
+    ret = OH_VideoProcessingCallback_Create(&callback);
+    ret = OH_VideoProcessingCallback_BindOnError(callback, OnError);
+    ret = OH_VideoProcessingCallback_BindOnState(callback, OnState);
+    ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer);
+    OHNativeWindow* window = nullptr;
+    OHNativeWindow* window2 = nullptr;
+    ret = OH_VideoProcessing_GetSurface(instance, &window);
+    ret = OH_VideoProcessing_GetSurface(instance2, &window2);
+    auto userData = VIDEO_PROCESSING_STATE_STOPPED;
+    ret = OH_VideoProcessing_RegisterCallback(instance, callback, &userData);
+    ret = OH_VideoProcessing_SetSurface(instance, window2);
+    ret = OH_VideoProcessing_Start(instance);
+    if (ret != VIDEO_PROCESSING_SUCCESS) {
+        return ret;
+    }
+    int videoSurfaceBuffNum = 1;
+    int sleepTime = 2;
+    for (int i = 0; i < videoSurfaceBuffNum; i++) {
+        window->surface->RequestBuffer(buffer, fence_, requestCfg_);
+        SetMeatadata(buffer, inColspcInfo);
+        SetMeatadata(buffer, (uint32_t)inMetaType_);
+        window->surface->FlushBuffer(buffer, fence_, flushCfg_);
+        OH_VideoProcessing_SetSurface(instance, window2);
+        sleep(sleepTime);
+    }
+    OH_VideoProcessing_Destroy(instance);
+    OH_VideoProcessing_Destroy(instance2);
+    OH_VideoProcessing_DeinitializeEnvironment();
+    return ret;
+}
+
+HWTEST_F(MetadataGeneratorVideoNdkImplUnitTest, testVideoProcess_testFunSupportedMetadata, TestSize.Level1)
+{
+    bool resultSupported = false;
+    resultSupported = OH_VideoProcessing_IsColorSpaceConversionSupported(nullptr, nullptr);
+    EXPECT_EQ(resultSupported, false);
+    resultSupported = OH_VideoProcessing_IsMetadataGenerationSupported(nullptr);
+    EXPECT_EQ(resultSupported, false);
+    const int formatListNum = 3;
+    int formatListHDRNative[formatListNum] = {NATIVEBUFFER_PIXEL_FMT_YCBCR_P010, NATIVEBUFFER_PIXEL_FMT_YCRCB_P010,
+        NATIVEBUFFER_PIXEL_FMT_RGBA_1010102};
+    const int colorMetaMetaListNum = 4;
+    const int colorMetaParaNum = 2;
+    const int colorMetaParaInMetaNum = 0;
+    const int colorMetaParaInColorNum = 1;
+    int colorMetaListSdr2Sdr[colorMetaMetaListNum][colorMetaParaNum] = {
+        {OH_VIDEO_HDR_HDR10, OH_COLORSPACE_BT2020_PQ_LIMIT},
+        {OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_PQ_LIMIT},
+        {OH_VIDEO_HDR_HLG, OH_COLORSPACE_BT2020_HLG_LIMIT},
+        {OH_VIDEO_HDR_VIVID, OH_COLORSPACE_BT2020_HLG_LIMIT}
+    };
+    VideoProcessing_ColorSpaceInfo sourceVideoInfo = {-1, -1, -1};
+    for (int n = 0; n < colorMetaMetaListNum; n++) {
+        sourceVideoInfo.metadataType = static_cast<int32_t>(colorMetaListSdr2Sdr[n][colorMetaParaInMetaNum]);
+        sourceVideoInfo.colorSpace = static_cast<int32_t>(colorMetaListSdr2Sdr[n][colorMetaParaInColorNum]);
+        for (int i = 0; i < formatListNum; i++) {
+            sourceVideoInfo.pixelFormat = static_cast<int32_t>(formatListHDRNative[i]);
+            resultSupported = OH_VideoProcessing_IsMetadataGenerationSupported(&sourceVideoInfo);
+            EXPECT_EQ(resultSupported, true);
+        }
+    }
+    sourceVideoInfo.metadataType = static_cast<int32_t>(OH_VIDEO_HDR_HLG);
+    sourceVideoInfo.colorSpace = static_cast<int32_t>(OH_COLORSPACE_BT2020_PQ_LIMIT);
+    sourceVideoInfo.pixelFormat = static_cast<int32_t>(NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP);
+    resultSupported = OH_VideoProcessing_IsMetadataGenerationSupported(&sourceVideoInfo);
+    EXPECT_EQ(resultSupported, false);
+}
+
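+// Exercises the full NDK lifecycle for the metadata generator: create two instances,
+// bind all callbacks, chain the surfaces, start/stop twice, and verify that
+// SetParameter/GetParameter and RenderOutputBuffer report OPERATION_NOT_PERMITTED
+// for this processing type before everything is destroyed again.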
+HWTEST_F(MetadataGeneratorVideoNdkImplUnitTest, testVideoProcess_testFun, TestSize.Level1) +{ + VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + int createType = 0x2; + OH_VideoProcessing* instance = nullptr; + OH_VideoProcessing* instance2 = nullptr; + ret = OH_VideoProcessing_Create(&instance, createType); + ret = OH_VideoProcessing_Create(&instance2, createType); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + VideoProcessing_Callback* callback = nullptr; + ret = OH_VideoProcessingCallback_Create(&callback); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnError(callback, OnError); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnState(callback, OnState); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OHNativeWindow* window = nullptr; + OHNativeWindow* window2 = nullptr; + ret = OH_VideoProcessing_GetSurface(instance, &window); + ret = OH_VideoProcessing_GetSurface(instance2, &window2); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + auto userData = VIDEO_PROCESSING_STATE_STOPPED; + ret = OH_VideoProcessing_RegisterCallback(instance, callback, &userData); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + OH_AVFormat* parameter = OH_AVFormat_Create(); + ret = OH_VideoProcessing_SetParameter(instance, parameter); + EXPECT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + ret = OH_VideoProcessing_GetParameter(instance, parameter); + EXPECT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + ret = OH_VideoProcessing_SetSurface(instance, window2); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + for (int i = 0; i < 2; i++) { + ret = OH_VideoProcessing_Start(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Stop(instance); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } + ret = OH_VideoProcessing_RenderOutputBuffer(instance, 0); + EXPECT_EQ(ret, VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED); + ret = OH_VideoProcessingCallback_Destroy(callback); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_Destroy(instance); + ret = OH_VideoProcessing_Destroy(instance2); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + ret = OH_VideoProcessing_DeinitializeEnvironment(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); +} + +HWTEST_F(MetadataGeneratorVideoNdkImplUnitTest, testVideoProcess_metadataGen, TestSize.Level1) +{ + const int formatListNum = 3; + int formatListHDR[formatListNum] = {GRAPHIC_PIXEL_FMT_YCBCR_P010, GRAPHIC_PIXEL_FMT_YCRCB_P010, + GRAPHIC_PIXEL_FMT_RGBA_1010102}; + const int colorMetaMetaListNum = 4; + const int colorMetaParaNum = 2; + const int colorMetaParaInMetaNum = 0; + const int colorMetaParaInColorNum = 1; + int colorMetaList[colorMetaMetaListNum][colorMetaParaNum] = { + {CM_VIDEO_HDR10, CM_BT2020_PQ_LIMIT}, + {CM_VIDEO_HDR_VIVID, CM_BT2020_PQ_LIMIT}, + {CM_VIDEO_HLG, CM_BT2020_HLG_LIMIT}, + {CM_VIDEO_HDR_VIVID, CM_BT2020_HLG_LIMIT} + }; + for (int n = 0; n < colorMetaMetaListNum; n++) { + inMetaType_ = static_cast(colorMetaList[n][colorMetaParaInMetaNum]); + inColspc_ = static_cast(colorMetaList[n][colorMetaParaInColorNum]); + for (int i = 0; i < formatListNum; i++) { + surfacePixelFmt_ = static_cast(formatListHDR[i]); + requestCfg_.format = surfacePixelFmt_; + VideoProcessing_ErrorCode ret = Process(); + EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); + } + } +} 
+} \ No newline at end of file diff --git a/test/unittest/utils/util_define.cpp b/test/unittest/utils/util_define.cpp new file mode 100644 index 0000000000000000000000000000000000000000..6bfc06b60270f997fa7854f4f296eb90e941d77f --- /dev/null +++ b/test/unittest/utils/util_define.cpp @@ -0,0 +1,364 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "util_define.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include "securec.h" +#include "vpe_log.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +using namespace std; +constexpr int32_t STRIDE = 2; + +void ReadFileYuv42016(std::string yuvFilePath, sptr &buffer, int width, int height) +{ + unique_ptr yuvFile = make_unique(); + CHECK_AND_RETURN_LOG(yuvFile != nullptr, "Fatal: No memory"); + yuvFile->open(yuvFilePath, ios::in | ios::binary); + yuvFile->seekg(0, ios::beg); + long ySize = width * height; + long uvSize = width * height / 4; + uint16_t *yBuffer = new uint16_t[ySize + STRIDE * uvSize]; + uint16_t *uBuffer = new uint16_t[uvSize]; + uint16_t *vBuffer = new uint16_t[uvSize]; + yuvFile->read(reinterpret_cast(yBuffer), ySize * sizeof(uint16_t)); + yuvFile->read(reinterpret_cast(uBuffer), uvSize * sizeof(uint16_t)); + yuvFile->read(reinterpret_cast(vBuffer), uvSize * sizeof(uint16_t)); + + memcpy_s(reinterpret_cast(buffer->GetVirAddr()), (ySize + uvSize * STRIDE) * sizeof(uint16_t), yBuffer, + (ySize + uvSize * STRIDE) * sizeof(uint16_t)); + + delete[] yBuffer; + delete[] uBuffer; + delete[] vBuffer; + yuvFile->close(); +} + +void ReadFileYuv42010ToNv1216(std::string yuvFilePath, sptr &buffer, int width, int height) +{ + unique_ptr yuvFile = make_unique(); + CHECK_AND_RETURN_LOG(yuvFile != nullptr, "Fatal: No memory"); + yuvFile->open(yuvFilePath, ios::in | ios::binary); + yuvFile->seekg(0, ios::beg); + long ySize = width * height; + long uvSize = width * height / 4; + uint16_t *yBuffer = new uint16_t[ySize + STRIDE * uvSize]; + uint16_t *uBuffer = new uint16_t[uvSize]; + uint16_t *vBuffer = new uint16_t[uvSize]; + yuvFile->read(reinterpret_cast(yBuffer), ySize * sizeof(uint16_t)); + yuvFile->read(reinterpret_cast(uBuffer), uvSize * sizeof(uint16_t)); + yuvFile->read(reinterpret_cast(vBuffer), uvSize * sizeof(uint16_t)); + + int num10To16 = 6; + int numTwo = 2; + uint16_t *uvBufferPtr = &yBuffer[ySize]; + for (int i = 0; i < ySize; i++) { + yBuffer[i] = yBuffer[i] << num10To16; + } + for (int j = 0; j < uvSize; j++) { + uvBufferPtr[numTwo * j] = uBuffer[j] << num10To16; + uvBufferPtr[numTwo * j + 1] = vBuffer[j] << num10To16; + } + + memcpy_s(reinterpret_cast(buffer->GetVirAddr()), (ySize + uvSize * STRIDE) * sizeof(uint16_t), yBuffer, + (ySize + uvSize * STRIDE) * sizeof(uint16_t)); + + delete[] yBuffer; + delete[] uBuffer; + delete[] vBuffer; + yuvFile->close(); +} + +void ReadInputFile(std::string yuvFilePath, sptr &buffer, int frameSize) +{ + unique_ptr yuvFile = make_unique(); + 
CHECK_AND_RETURN_LOG(yuvFile != nullptr, "Fatal: No memory"); + yuvFile->open(yuvFilePath, ios::in | ios::binary); + yuvFile->seekg(0, ios::beg); + yuvFile->read(reinterpret_cast(buffer->GetVirAddr()), frameSize); + yuvFile->close(); +} + +static void SaveMetaDataToBin(int frameId, const char *fileName, unsigned char *metadataPayload, + int metadataPayloadSize) +{ + FILE *fileOut = nullptr; + if (frameId == 0) { + fileOut = fopen(fileName, "wb"); + } else { + fileOut = fopen(fileName, "ab+"); + } + if (fileOut == nullptr) { + printf("open file[%s] Error:%s!", fileName, "error"); + return; + } + + uint32_t mdLen = static_cast(metadataPayloadSize); + int len = fwrite(&mdLen, sizeof(uint32_t), 1, fileOut); + if (len != 1) { + printf("write file Error:%s with mdLen!", fileName); + } + len = fwrite(metadataPayload, sizeof(uint8_t), metadataPayloadSize, fileOut); + if (len != metadataPayloadSize) { + printf("write file Error:%s!", fileName); + } + fclose(fileOut); +} + +void SaveMetadataFromSurBuffer(const sptr &input, int frame, const string &metadataBin) +{ + std::vector inMetaData; + input->GetMetadata(ATTRKEY_HDR_DYNAMIC_METADATA, inMetaData); + unsigned char *metaData = inMetaData.data(); + uint32_t meteDataLength = inMetaData.size(); + printf("frame=%d, imeteDataLength_demo = %u\n", frame, meteDataLength); + SaveMetaDataToBin(frame, metadataBin.c_str(), metaData, meteDataLength); +} + +void WriteOutFile(int frame, const string &outYuvFileName, const sptr &output, int frameSize) +{ + std::unique_ptr outputYuv; + if (frame == 0) { + outputYuv = + std::make_unique(outYuvFileName.c_str(), std::ios::binary | std::ios::out | std::ios::trunc); + } else { + outputYuv = std::make_unique(outYuvFileName.c_str(), std::ios::binary | std::ios::app); + } + outputYuv->write(static_cast(output->GetVirAddr()), frameSize); +} + + +std::string GetPixFmtString(GraphicPixelFormat pixfmt) +{ + switch (pixfmt) { + case GRAPHIC_PIXEL_FMT_YCBCR_P010: + return "nv12_10"; + case GRAPHIC_PIXEL_FMT_YCRCB_P010: + return "nv21_10"; + case GRAPHIC_PIXEL_FMT_RGBA_1010102: + return "rgba1010102"; + default: + return "none"; + } +} + +std::string GetColspcStringPrimaries(CM_ColorSpaceInfo colorSpaceInfo) +{ + std::string str = ""; + switch (colorSpaceInfo.primaries) { + case COLORPRIMARIES_BT709: + str = "_709"; + break; + case COLORPRIMARIES_BT601_P: + str = "_601p"; + break; + case COLORPRIMARIES_BT601_N: + str = "_601n"; + break; + case COLORPRIMARIES_BT2020: + str = "_2020"; + break; + default: + str = "_none"; + break; + } + return str; +} +std::string GetColspcStringTrans(CM_ColorSpaceInfo colorSpaceInfo) +{ + std::string str = ""; + switch (colorSpaceInfo.transfunc) { + case TRANSFUNC_BT709: + str = "_709"; + break; + case TRANSFUNC_SRGB: + str = "_srgb"; + break; + case TRANSFUNC_LINEAR: + str = "_linear"; + break; + case TRANSFUNC_PQ: + str = "_pq"; + break; + case TRANSFUNC_HLG: + str = "_hlg"; + break; + case TRANSFUNC_ADOBERGB: + str = "_adobergb"; + break; + case TRANSFUNC_GAMMA2_2: + str = "_gamma22"; + break; + case TRANSFUNC_GAMMA2_4: + str = "_gamma24"; + break; + default: + str = "_none"; + break; + } + return str; +} +std::string GetColspcStringRange(CM_ColorSpaceInfo colorSpaceInfo) +{ + std::string str = ""; + switch (colorSpaceInfo.range) { + case RANGE_FULL: + str = "_full"; + break; + case RANGE_LIMITED: + str = "_limited"; + break; + default: + str = "_none"; + break; + } + return str; +} +std::string GetColspcString(CM_ColorSpaceInfo colorSpaceInfo) +{ + std::string str = ""; + std::string strPrima 
= GetColspcStringPrimaries(colorSpaceInfo); + str = strPrima; + std::string strTrans = GetColspcStringTrans(colorSpaceInfo); + str += strTrans; + std::string strRange = GetColspcStringRange(colorSpaceInfo); + str += strRange; + return str; +} + +std::string GetMetadataString(CM_HDR_Metadata_Type metaType) +{ + switch (metaType) { + case CM_VIDEO_HLG: + return "_vHlg"; + case CM_VIDEO_HDR10: + return "_vHdr10"; + case CM_VIDEO_HDR_VIVID: + return "_vHdrVivid"; + case CM_IMAGE_HDR_VIVID_DUAL: + return "_iHdrVividD"; + case CM_IMAGE_HDR_VIVID_SINGLE: + return "_iHdrVividS"; + case CM_IMAGE_HDR_ISO_DUAL: + return "_iHdrIsoD"; + case CM_IMAGE_HDR_ISO_SINGLE: + return "_iHdrIsoS"; + default: + return "none"; + } +} + +std::string GetOutFileName(const string &baseName, const ParameterBase ¶m) +{ + string outputName = ""; + outputName = baseName + "_iFmt" + GetPixFmtString(param.inPixFmt) + "_iCol" + GetColspcString(param.inColspcInfo) + + "_iMetaT" + GetMetadataString(param.inMetaType) + "_oFmt" + GetPixFmtString(param.outPixFmt) + "_oCol" + + GetColspcString(param.outColspcInfo) + "_oMeta" + GetMetadataString(param.outMetaType) + ".yuv"; + return outputName; +} + +void SetMeatadata(sptr &buffer, uint32_t value) +{ + std::vector metadata; + metadata.resize(sizeof(value)); + (void)memcpy_s(metadata.data(), metadata.size(), &value, sizeof(value)); + uint32_t err = buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, metadata); + printf("Buffer set metadata type, ret: %u\n", err); +} +void SetMeatadata(sptr &buffer, const CM_ColorSpaceInfo &colorspaceInfo) +{ + std::vector metadata; + metadata.resize(sizeof(CM_ColorSpaceInfo)); + (void)memcpy_s(metadata.data(), metadata.size(), &colorspaceInfo, sizeof(CM_ColorSpaceInfo)); + uint32_t err = buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, metadata); + printf("Buffer set colorspace info, ret: %u\n", err); +} +void SetMeatadata(sptr &buffer, int key, const float &data) +{ + std::vector metadata; + metadata.resize(sizeof(float)); + (void)memcpy_s(metadata.data(), metadata.size(), &data, sizeof(float)); + uint32_t err = buffer->SetMetadata(key, metadata); + printf("Buffer set colorspace info, ret: %u\n", err); +} +void SetMeatadata(sptr &buffer, int key, const int &data) +{ + std::vector metadata; + metadata.resize(sizeof(int)); + (void)memcpy_s(metadata.data(), metadata.size(), &data, sizeof(int)); + uint32_t err = buffer->SetMetadata(key, metadata); + printf("Buffer set colorspace info, ret: %u\n", err); +} +void SetMeatadata(sptr &buffer, std::unique_ptr &metadataFile) +{ + if (!metadataFile->is_open()) { + printf("Metadata file is not open\n"); + return; + } + + std::vector metadata; + uint32_t metadataSize = 0; + + metadataFile->read(reinterpret_cast(&metadataSize), sizeof(uint32_t)); + if (metadataSize == 0) { + printf("Read metadata failed, get a size: %u\n", metadataSize); + return; + } + metadata.resize(metadataSize); + metadataFile->read(reinterpret_cast(metadata.data()), metadataSize); + + // Dump metadata + for (size_t idx = 0; idx < metadata.size(); idx++) { + uint32_t data = + (metadata[idx] << 24) + (metadata[idx + 1] << 16) + (metadata[idx + 2] << 8) + (metadata[idx + 3]); + (void)data; + } + + int32_t err = buffer->SetMetadata(ATTRKEY_HDR_DYNAMIC_METADATA, metadata); + printf("Buffer hdr dynamic metadata type, ret: %d\n", err); +} +sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height) +{ + auto buffer = SurfaceBuffer::Create(); + if (nullptr == buffer) { + printf("Create surface buffer failed\n"); + return nullptr; + } + 
BufferRequestConfig inputCfg; + inputCfg.width = width; + inputCfg.height = height; + inputCfg.strideAlignment = width; + inputCfg.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA; + inputCfg.format = pixelFormat; + inputCfg.timeout = 0; + GSError err = buffer->Alloc(inputCfg); + if (GSERROR_OK != err) { + printf("Alloc surface buffer failed\n"); + return nullptr; + } + return buffer; +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/unittest/utils/util_define.h b/test/unittest/utils/util_define.h new file mode 100644 index 0000000000000000000000000000000000000000..021fefcee93030bdbee5847c0920f21b0a5029fa --- /dev/null +++ b/test/unittest/utils/util_define.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef VPE_UTIL_DEFINE_H +#define VPE_UTIL_DEFINE_H + +#include +#include +#include "graphic_common_c.h" +#include "algorithm_common.h" +#include "algorithm_errors.h" +#include "v1_0/cm_color_space.h" +#include "vpe_context.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +struct ParameterBase { + int modelType; + int width; + int height; + int widthGainmap; + int heightGainmap; + int metadatInPut; + CM_HDR_Metadata_Type inMetaType; // CM_METADATA_NONE, CM_VIDEO_HLG, CM_VIDEO_HDR10, CM_VIDEO_HDR_VIVID + CM_ColorSpaceInfo inColspcInfo; + GraphicPixelFormat inPixFmt; // nv12 10bit + CM_HDR_Metadata_Type outMetaType; + CM_ColorSpaceInfo outColspcInfo; + GraphicPixelFormat outPixFmt; + CM_HDR_Metadata_Type gainmapMetaType; + CM_ColorSpaceInfo gainmapColspcInfo; + GraphicPixelFormat gainmapPixFmt; + std::string yuvFilePath; + std::string metadataFilePath; + VPEContext context; +}; + +void ReadFileYuv42016(std::string yuvFilePath, sptr &buffer, int width, int height); +void ReadFileYuv42010ToNv1216(std::string yuvFilePath, sptr &buffer, int width, int height); +void ReadInputFile(std::string yuvFilePath, sptr &buffer, int frameSize); + +void SaveMetadataFromSurBuffer(const sptr &input, int frame, const std::string &metadataBin); +void WriteOutFile(int frame, const std::string &outYuvFileName, const sptr &output, int frameSize); +std::string GetPixFmtString(GraphicPixelFormat pixfmt); +std::string GetMetadataString(CM_HDR_Metadata_Type metaType); +std::string GetColspcString(CM_ColorSpaceInfo colorSpaceInfo); +std::string GetOutFileName(const std::string &baseName, const ParameterBase ¶m); +void SetMeatadata(sptr &buffer, uint32_t value); +void SetMeatadata(sptr &buffer, const CM_ColorSpaceInfo &colorspaceInfo); +void SetMeatadata(sptr &buffer, int key, const float &data); +void SetMeatadata(sptr &buffer, int key, const int &data); +void SetMeatadata(sptr &buffer, std::unique_ptr &metadataFile); +sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height); +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // VPE_UTIL_DEFINE_H \ No newline at end of file 
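// Usage sketch (illustrative only, not part of the files added in this patch): the unit tests in
// this change combine the util_define helpers roughly as below. The sptr<SurfaceBuffer> template
// argument is assumed from the SurfaceBuffer::Create() calls in util_define.cpp, and the file
// path, width and height literals are placeholders.
#include "util_define.h"

using namespace OHOS;
using namespace OHOS::Media::VideoProcessingEngine;

static sptr<SurfaceBuffer> PrepareHdrInputFrame()
{
    constexpr int32_t width = 3840;
    constexpr int32_t height = 2160;
    // Allocate a 10-bit NV12 (P010) buffer through the shared test utility.
    sptr<SurfaceBuffer> buffer = CreateSurfaceBuffer(GRAPHIC_PIXEL_FMT_YCBCR_P010, width, height);
    if (buffer == nullptr) {
        return nullptr;
    }
    // Fill it from a raw 10-bit YUV420 planar file, converting to P010 layout in place.
    ReadFileYuv42010ToNv1216("/data/test/media/input.yuv", buffer, width, height);
    // Attach the colour-space description and HDR metadata type expected by the algorithms.
    CM_ColorSpaceInfo colorSpace = { COLORPRIMARIES_BT2020, TRANSFUNC_HLG, MATRIX_BT2020, RANGE_LIMITED };
    SetMeatadata(buffer, colorSpace);
    SetMeatadata(buffer, static_cast<uint32_t>(CM_VIDEO_HDR_VIVID));
    return buffer;
}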
diff --git a/test/unittest/video_variable_refreshrate_test/BUILD.gn b/test/unittest/video_variable_refreshrate_test/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..21c9c07afea16d0ce1487418133dbebd40c36376 --- /dev/null +++ b/test/unittest/video_variable_refreshrate_test/BUILD.gn @@ -0,0 +1,57 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("video_variable_refreshrate_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + boundary_sanitize = true + cfi = true + cfi_cross_dso = true + integer_overflow = true + ubsan = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/interfaces/inner_api", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework/algorithm/common/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework/algorithm/extension_manager/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework/algorithm/video_variable_refresh_rate/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework/dfx/include", + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/unittest/utils", + ] + + sources = [ "video_variable_refreshrate_unit_test.cpp", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", + "drivers_interface_display:display_composer_idl_headers_1.1", + "drivers_interface_display:libdisplay_composer_hdi_impl", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "opencl-headers:libcl", + "video_processing_engine:videoprocessingengine", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine_ext" +} diff --git a/test/unittest/video_variable_refreshrate_test/video_variable_refreshrate_unit_test.cpp b/test/unittest/video_variable_refreshrate_test/video_variable_refreshrate_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c97a8addc3f7a24b8b2c20e0f909ed6049e20d9a --- /dev/null +++ b/test/unittest/video_variable_refreshrate_test/video_variable_refreshrate_unit_test.cpp @@ -0,0 +1,195 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include "iostream" +#include "securec.h" +#include "vpe_trace.h" +#include "vpe_log.h" +#include "video_refreshrate_prediction.h" +#include "v2_0/buffer_handle_meta_key_type.h" +#include "v1_2/display_composer_type.h" + +using namespace std; +using namespace testing::ext; + +namespace { +const std::string MV_FILE = "3840x1608.pmv"; +const std::string UT_PROCESS_NAME = "video_variable_refreshrate_unit_test"; +} // namespace + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +class VideoVariableRefreshRateUnitTest : public testing::Test { +public: + static void SetUpTestCase(void); + static void TearDownTestCase(void); + void SetUp(); + void TearDown(); +}; + +void VideoVariableRefreshRateUnitTest::SetUpTestCase(void) +{ + cout << "[VideoVariableRefreshRateUnitTest SetUpTestCase]: success!" << endl; +} + +void VideoVariableRefreshRateUnitTest::TearDownTestCase(void) +{ + cout << "[VideoVariableRefreshRateUnitTest TearDownTestCase]: " << endl; +} + +void VideoVariableRefreshRateUnitTest::SetUp(void) +{ + cout << "[VideoVariableRefreshRateUnitTest SetUp]: SetUp!!!" << endl; +} + +void VideoVariableRefreshRateUnitTest::TearDown(void) +{ + cout << "[VideoVariableRefreshRateUnitTest TearDown]: over!!!" << endl; +} + +uint32_t CalculateMaxMvBufferSize(uint32_t width, uint32_t height) +{ + uint32_t align = 64; + uint32_t multCoef = 1; + uint32_t diviCoef = 4; + uint32_t offset = 256; + uint32_t mvWidth = (width + align - 1) & (~(align - 1)); + uint32_t mvHeight = (height + align - 1) & (~(align - 1)); + return mvWidth * mvHeight * multCoef / diviCoef + offset; +} + +sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height, uint8_t *mvBuffer, + int32_t mvLength) +{ + auto buffer = SurfaceBuffer::Create(); + if (nullptr == buffer) { + printf("Create surface buffer failed\n"); + return nullptr; + } + BufferRequestConfig inputCfg; + inputCfg.width = width; + inputCfg.height = height; + inputCfg.strideAlignment = width; + inputCfg.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER + | BUFFER_USAGE_HW_TEXTURE | HDI::Display::Composer::V1_2::HBM_USE_VIDEO_DEC_MV; + inputCfg.format = pixelFormat; + inputCfg.timeout = 0; + GSError err = buffer->Alloc(inputCfg); + if (GSERROR_OK != err) { + printf("Alloc surface buffer failed\n"); + return nullptr; + } + + using namespace HDI::Display::Graphic::Common; + std::vector vec; + V2_0::BlobDataType data; + int32_t ret = buffer->GetMetadata(V2_0::ATTRKEY_VIDEO_DECODER_MV, vec); + CHECK_AND_RETURN_RET_LOG(ret == GSERROR_OK && (vec.size() == sizeof(V2_0::BlobDataType)), + nullptr, "VRR got decode mv type from handle failed"); + ret = memcpy_s(&data, sizeof(V2_0::BlobDataType), vec.data(), vec.size()); + if (ret != EOK) { + printf("Copy decoder mv blob info failed\n"); + return nullptr; + } + ret = memcpy_s((void *)(data.vaddr + data.offset), mvLength, mvBuffer, mvLength); + if (ret != EOK) { + printf("Copy motion vector data failed\n"); + return nullptr; + } + printf("Alloc surface buffer with motion vector success\n"); + return buffer; +} + +HWTEST_F(VideoVariableRefreshRateUnitTest, VideoVariableRefreshRate_init_01, TestSize.Level1) +{ + auto vrrPredictor = OHOS::Media::VideoProcessingEngine::VideoRefreshRatePrediction::Create(); + VPEAlgoErrCode ret = vrrPredictor->CheckVRRSupport(UT_PROCESS_NAME); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(VideoVariableRefreshRateUnitTest, VideoVariableRefreshRate_init_02, TestSize.Level1) +{ + VideoRefreshRatePredictionHandle *vrrHandle = VideoRefreshRatePredictionCreate(); + int32_t
ret = VideoRefreshRatePredictionCheckSupport(vrrHandle, UT_PROCESS_NAME.c_str()); + VideoRefreshRatePredictionDestroy(vrrHandle); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(VideoVariableRefreshRateUnitTest, VideoVariableRefreshRate_implProcess_01, TestSize.Level1) +{ + VPEAlgoErrCode ret = VPE_ALGO_ERR_OK; + int32_t width = 3840; + int32_t height = 1608; + auto vrrPredictor = OHOS::Media::VideoProcessingEngine::VideoRefreshRatePrediction::Create(); + int mvLength = CalculateMaxMvBufferSize(width, height); + uint8_t *mvBuffer = (uint8_t *)malloc(mvLength); + sptr inputFrame = CreateSurfaceBuffer(GRAPHIC_PIXEL_FMT_RGBA_8888, + width, height, mvBuffer, mvLength); + ret = vrrPredictor->Process(inputFrame, 60, MOTIONVECTOR_TYPE_HEVC); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + free(mvBuffer); +} + +HWTEST_F(VideoVariableRefreshRateUnitTest, VideoVariableRefreshRate_implProcess_02, TestSize.Level1) +{ + VPEAlgoErrCode ret = VPE_ALGO_ERR_OK; + int32_t width = 3840; + int32_t height = 1608; + auto vrrPredictor = OHOS::Media::VideoProcessingEngine::VideoRefreshRatePrediction::Create(); + int mvLength = CalculateMaxMvBufferSize(width, height); + uint8_t *mvBuffer = (uint8_t *)malloc(mvLength); + FILE *fp = fopen(MV_FILE.c_str(), "rb"); + if (fp == nullptr) { + printf("open motion vector file [%s] fail!\n", MV_FILE.c_str()); + return; + } + int frameIndex = 0; + while (1) { + int readCnt = fread(mvBuffer, 1, mvLength, fp); + if (readCnt < mvLength) { + break; + } + sptr inputFrame = CreateSurfaceBuffer(GRAPHIC_PIXEL_FMT_RGBA_8888, + width, height, mvBuffer, mvLength); + ret = vrrPredictor->Process(inputFrame, 60, MOTIONVECTOR_TYPE_HEVC); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + frameIndex++; + } + free(mvBuffer); + fclose(fp); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +HWTEST_F(VideoVariableRefreshRateUnitTest, VideoVariableRefreshRate_implProcess_03, TestSize.Level1) +{ + int32_t width = 3840; + int32_t height = 1608; + VideoRefreshRatePredictionHandle *vrrHandle = VideoRefreshRatePredictionCreate(); + int32_t ret = VideoRefreshRatePredictionCheckSupport(vrrHandle, UT_PROCESS_NAME.c_str()); + int mvLength = CalculateMaxMvBufferSize(width, height); + uint8_t *mvBuffer = (uint8_t *)malloc(mvLength); + sptr inputFrame = CreateSurfaceBuffer(GRAPHIC_PIXEL_FMT_RGBA_8888, + width, height, mvBuffer, mvLength); + VideoRefreshRatePredictionProcess(vrrHandle, inputFrame->SurfaceBufferToNativeBuffer(), 60, MOTIONVECTOR_TYPE_HEVC); + VideoRefreshRatePredictionDestroy(vrrHandle); + free(mvBuffer); + EXPECT_NE(ret, VPE_ALGO_ERR_OK); +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/test/unittest/vpe_framework/BUILD.gn b/test/unittest/vpe_framework/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..aa71a82de03c88861e1120d882a4e31cef98eaad --- /dev/null +++ b/test/unittest/vpe_framework/BUILD.gn @@ -0,0 +1,63 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import("//build/test.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_unittest("vpe_framework_unit_test") { + module_out_path = UNIT_TEST_OUTPUT_PATH + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + cflags = VIDEO_PROCESSING_ENGINE_CFLAGS + + include_dirs = [ + "$VIDEO_PROCESSING_ENGINE_ROOT_DIR", + "$FRAMEWORK_DIR", + "$FRAMEWORK_DIR/common/include", + "$COLORSPACE_CONVERTER_DIR/include", + "$COLORSPACE_CONVERTER_DISPLAY_DIR/include", + "$DFX_DIR/include", + "$ALGORITHM_EXTENSION_MANAGER_DIR/include", + "$METADATA_GENERATOR_DIR/include", + "$PLUGIN_DIR/include", + "$INTERFACES_INNER_API_DIR", + "$SKIA_DIR", + ] + + sources = [ + # "vpe_framework_unit_test.cpp", + # "$PLUGIN_DIR/plugin_manager.cpp", + # "$ENGINE_DIR/video_processing_engine.cpp", + # "$DFX_DIR/trace.cpp", + ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine" + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "hilog:libhilog", + "hitrace:hitrace_meter", + "drivers_interface_display:display_commontype_idl_headers", + ] + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/unittest/vpe_framework/vpe_framework_unit_test.cpp b/test/unittest/vpe_framework/vpe_framework_unit_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f4da449c4e0bbdd11ea2a5433122c63290e4b1b5 --- /dev/null +++ b/test/unittest/vpe_framework/vpe_framework_unit_test.cpp @@ -0,0 +1,156 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include "plugin_manager.h" +#include "video_processing_engine.h" +#include "vpe_errors.h" + +using namespace std; +using namespace testing::ext; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class VPEFrameworkUnitTest : public testing::Test { +public: + static void SetUpTestCase(void) {}; + static void TearDownTestCase(void) {}; + void SetUp() {}; + void TearDown() {}; +}; + +/** + * tc.number desc + * |01|01|01| + * ClassNo ------| | |------ FuncTestNo + * | + * FuncNo + */ + +/** + * @tc.number : 010101 + * @tc.name : PluginManager_Init + * @tc.desc : Test for PluginManager Init + */ +HWTEST_F(VPEFrameworkUnitTest, PluginManager_Init_010101, TestSize.Level1) +{ + std::unique_ptr pluginManager = std::make_unique(); + ASSERT_EQ(VPE_ERR_OK, pluginManager->Init()); +} + +/** + * @tc.number : 010201 + * @tc.name : PluginManager_FindColorSpaceConverterPlugin + * @tc.desc : Test for PluginManager FindColorSpaceConverterPlugin + */ +HWTEST_F(VPEFrameworkUnitTest, PluginManager_FindColorSpaceConverterPlugin_010201, TestSize.Level1) +{ + std::unique_ptr pluginManager = std::make_unique(); + ASSERT_EQ(VPE_ERR_OK, pluginManager->Init()); + + ColorSpaceDescription inputDesc = {}, outputDesc = {}; + auto ret = pluginManager->FindColorSpaceConverterPlugin(inputDesc, outputDesc); + std::cout << "Find plugin " << (ret == nullptr ? 
"false" : "successfully"); +} + +/** + * @tc.number : 010301 + * @tc.name : PluginManager_FindColorSpaceConverterDisplayPlugin + * @tc.desc : Test for PluginManager FindColorSpaceConverterDisplayPlugin + */ +HWTEST_F(VPEFrameworkUnitTest, PluginManager_FindColorSpaceConverterDisplayPlugin_010301, TestSize.Level1) +{ + std::unique_ptr pluginManager = std::make_unique(); + ASSERT_EQ(VPE_ERR_OK, pluginManager->Init()); + + auto ret = pluginManager->FindColorSpaceConverterDisplayPlugin(); + std::cout << "Find plugin " << (ret.empty() ? "false" : "successfully") << endl; +} + +/** + * @tc.number : 010401 + * @tc.name : PluginManager_FindMetadataGeneratorPlugin + * @tc.desc : Test for PluginManager FindMetadataGeneratorPlugin + */ +HWTEST_F(VPEFrameworkUnitTest, PluginManager_FindMetadataGeneratorPlugin_010401, TestSize.Level1) +{ + std::unique_ptr pluginManager = std::make_unique(); + ASSERT_EQ(VPE_ERR_OK, pluginManager->Init()); + + ColorSpaceDescription inputDesc = {}; + auto ret = pluginManager->FindMetadataGeneratorPlugin(inputDesc); + std::cout << "Find plugin " << (ret == nullptr ? "false" : "successfully") << endl; +} + +/** + * @tc.number : 020101 + * @tc.name : VideoProcessingEngine_CreateColorSpaceConverter + * @tc.desc : Test for VideoProcessingEngine CreateColorSpaceConverter + */ +HWTEST_F(VPEFrameworkUnitTest, VideoProcessingEngine_CreateColorSpaceConverter_020101, TestSize.Level1) +{ + int ret = 0; + auto &vpe = VideoProcessingEngine::GetInstance(); + + ColorSpaceDescription inputDesc = {}, outputDesc = {}; + auto impl = vpe.CreateColorSpaceConverter(inputDesc, outputDesc); + if (impl == nullptr) { + ret = -1; + } + cout << "Create ColorSpaceConverter " << (impl == nullptr ? "failed" : "successfully") << endl; + EXPECT_EQ(ret, 0); +} + +/** + * @tc.number : 020201 + * @tc.name : VideoProcessingEngine_CreateColorSpaceConverterDisplay + * @tc.desc : Test for VideoProcessingEngine CreateColorSpaceConverterDisplay + */ +HWTEST_F(VPEFrameworkUnitTest, VideoProcessingEngine_CreateColorSpaceConverterDisplay_020201, TestSize.Level1) +{ + int ret = 0; + auto &vpe = VideoProcessingEngine::GetInstance(); + + auto ret = vpe.CreateColorSpaceConverterDisplay(); + if (ret.empty()) { + ret = -1; + } + cout << "Create CreateColorSpaceConverterDisplay " << (ret.empty() ? "failed" : "successfully") << endl; + EXPECT_EQ(ret, 0); +} + +/** + * @tc.number : 020301 + * @tc.name : VideoProcessingEngine_CreateMetadataGenerator + * @tc.desc : Test for VideoProcessingEngine CreateMetadataGenerator + */ +HWTEST_F(VPEFrameworkUnitTest, VideoProcessingEngine_CreateMetadataGenerator_020301, TestSize.Level1) +{ + int ret = 0; + auto &vpe = VideoProcessingEngine::GetInstance(); + + ColorSpaceDescription inputDesc = {}; + auto impl = vpe.CreateMetadataGenerator(inputDesc); + if (impl == nullptr) { + ret = -1; + } + cout << "Create CreateMetadataGenerator " << (impl == nullptr ? "failed" : "successfully") << endl; + EXPECT_EQ(ret, 0); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/test/utils/ColorSpaceConverter/sample/BUILD.gn b/test/utils/ColorSpaceConverter/sample/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..a7026d9d4e9d7d165db87d228e4b98eeac4edfd2 --- /dev/null +++ b/test/utils/ColorSpaceConverter/sample/BUILD.gn @@ -0,0 +1,44 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +#http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/ohos.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_source_set("csc_test_utils") { + sources = [ + "csc_sample.cpp" + ] + + include_dirs = [ + "$INTERFACES_INNER_API_DIR", + ] + + external_deps = [ + "c_utils:utils", + "graphic_surface:surface", + "drivers_interface_display:display_commontype_idl_headers", + ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine" + ] + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/utils/ColorSpaceConverter/sample/csc_sample.cpp b/test/utils/ColorSpaceConverter/sample/csc_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2c5c07a36aef610cea3912a5a81bd1f69f31f1e4 --- /dev/null +++ b/test/utils/ColorSpaceConverter/sample/csc_sample.cpp @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "csc_sample.h" +#include +#include +#include +#include +#include +#include "securec.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +void ReadYuvFile(sptr &buffer, std::unique_ptr &yuvFile, int32_t frameSize) +{ + if (!yuvFile->is_open()) { + printf("Yuv file is not open\n"); + return; + } + yuvFile->read(reinterpret_cast(buffer->GetVirAddr()), frameSize); +} + +sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height) +{ + auto buffer = SurfaceBuffer::Create(); + if (nullptr == buffer) { + printf("Create surface buffer failed\n"); + return nullptr; + } + BufferRequestConfig inputCfg; + inputCfg.width = width; + inputCfg.height = height; + inputCfg.strideAlignment = width; + inputCfg.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER + | BUFFER_USAGE_HW_TEXTURE; + inputCfg.format = pixelFormat; + inputCfg.timeout = 0; + GSError err = buffer->Alloc(inputCfg); + if (GSERROR_OK != err) { + printf("Alloc surface buffer failed\n"); + return nullptr; + } + return buffer; +} + +void SetMeatadata(sptr &buffer, uint32_t value) +{ + std::vector metadata; + metadata.resize(sizeof(value)); + (void)memcpy_s(metadata.data(), metadata.size(), &value, sizeof(value)); + GSError err = buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, metadata); + if (err != 0) { + printf("Buffer set metadata type, ret: %d\n", static_cast(err)); + } +} + +void SetMeatadata(sptr &buffer, const CM_ColorSpaceInfo &colorspaceInfo) +{ + std::vector metadata; + metadata.resize(sizeof(CM_ColorSpaceInfo)); + (void)memcpy_s(metadata.data(), metadata.size(), &colorspaceInfo, sizeof(CM_ColorSpaceInfo)); + GSError err = buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, metadata); + if (err != 0) { + printf("Buffer set colorspace info, ret: %d\n", static_cast(err)); + } +} + +void SetMeatadata(sptr &buffer, std::unique_ptr &metadataFile) +{ + if (!metadataFile->is_open()) { + printf("Metadata file is not open\n"); + return; + } + + std::vector metadata; + uint32_t metadataSize = 0; + + metadataFile->read(reinterpret_cast(&metadataSize), sizeof(uint32_t)); + if (metadataSize == 0) { + printf("Read metadata failed, get a size: %u\n", metadataSize); + return; + } + metadata.resize(metadataSize); + metadataFile->read(reinterpret_cast(metadata.data()), metadataSize); + + GSError err = buffer->SetMetadata(ATTRKEY_HDR_DYNAMIC_METADATA, metadata); + if (err != 0) { + printf("Buffer hdr dynamic metadata type, ret: %d\n", static_cast(err)); + } +} + +void PrintMetadataType(sptr &buffer, int32_t bufferHandleAttrKey) +{ + std::vector metadata; + GSError err = buffer->GetMetadata(bufferHandleAttrKey, metadata); + if (err != 0) { + printf("Get metadata failed, err: %d\n", static_cast(err)); + return; + } + int32_t value; + (void)memcpy_s(&value, sizeof(value), metadata.data(), metadata.size()); + + std::string bufferHandleStr = bufferHandleAttrKey == ATTRKEY_COLORSPACE_TYPE ? 
"colorspace" : "metadata"; + printf("Buffer %s type %d\n", bufferHandleStr.c_str(), value); +} + +void PrintMetadataKey(sptr &buffer) +{ + std::vector keys; + GSError err = buffer->ListMetadataKeys(keys); + printf("List buffer key %d\n", static_cast(err)); + for (auto i : keys) { + printf("Buffer metadata key %u\n", i); + } + + err = buffer->EraseMetadataKey(ATTRKEY_COLORSPACE_TYPE); + printf("Buffer erase metadata key %d\n", static_cast(err)); + err = buffer->EraseMetadataKey(ATTRKEY_HDR_METADATA_TYPE); + printf("Buffer erase metadata key %d\n", static_cast(err)); +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/utils/ColorSpaceConverter/sample/csc_sample.h b/test/utils/ColorSpaceConverter/sample/csc_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..0848c7474970421345494ee9a9d795ad462ff622 --- /dev/null +++ b/test/utils/ColorSpaceConverter/sample/csc_sample.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VPE_TEST_CSC_SAMPLE_H +#define VPE_TEST_CSC_SAMPLE_H + +#include "colorspace_converter.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +void ReadYuvFile(sptr &buffer, std::unique_ptr &yuvFile, int32_t frameSize); +sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height); +void SetMeatadata(sptr &buffer, uint32_t value); +void SetMeatadata(sptr &buffer, const CM_ColorSpaceInfo &colorspaceInfo); +void SetMeatadata(sptr &buffer, std::unique_ptr &metadataFile); +void PrintMetadataType(sptr &buffer, int32_t bufferHandleAttrKey); +void PrintMetadataKey(sptr &buffer); +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // VPE_TEST_CSC_SAMPLE_H \ No newline at end of file diff --git a/test/utils/ColorSpaceConverter/sample/csc_sample_define.h b/test/utils/ColorSpaceConverter/sample/csc_sample_define.h new file mode 100644 index 0000000000000000000000000000000000000000..f7a8f398fea2fa10532777b3c3865ac1d2cdd646 --- /dev/null +++ b/test/utils/ColorSpaceConverter/sample/csc_sample_define.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef VPE_TEST_CSC_SAMPLE_DEFINE_H +#define VPE_TEST_CSC_SAMPLE_DEFINE_H + +#include "colorspace_converter.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +namespace CSCSampleDefine { +inline constexpr std::string_view METADATA_FILE = "/data/test/media/csc_metadata.bin"; + +inline constexpr std::string_view YUV_FILE = "/data/test/media/csc_yuv42010_2_frames.yuv"; +inline constexpr int32_t YUV_FILE_PIXEL_FORMAT = GRAPHIC_PIXEL_FMT_RGBA_1010102 + 1; +inline constexpr int32_t OUTPUT_PIXEL_FORMAT = GRAPHIC_PIXEL_FMT_YCBCR_420_SP; +inline constexpr uint32_t YUV_FILE_FRAME_NUM = 2; +inline constexpr int32_t WIDTH = 3840; +inline constexpr int32_t HEIGHT = 2160; +inline constexpr int32_t ONE_FRAME_SIZE = 24883200; // GRAPHIC_PIXEL_FMT_RGBA_1010102 + 1 +inline constexpr CM_ColorSpaceInfo INPUT_COLORSPACE_INFO = { + COLORPRIMARIES_BT2020, TRANSFUNC_HLG, MATRIX_BT2020, RANGE_LIMITED +}; +inline constexpr CM_ColorSpaceInfo OUTPUT_COLORSPACE_INFO = { + COLORPRIMARIES_BT709, TRANSFUNC_BT709, MATRIX_BT709, RANGE_LIMITED +}; +} // namespace CSCSampleDefine +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // VPE_TEST_CSC_SAMPLE_DEFINE_H \ No newline at end of file diff --git a/test/utils/DetailEnhancer/sample/BUILD.gn b/test/utils/DetailEnhancer/sample/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..1a359bffbd7aedb97f99049fb66bab2fb5361059 --- /dev/null +++ b/test/utils/DetailEnhancer/sample/BUILD.gn @@ -0,0 +1,46 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/ohos.gni") +import("//foundation/multimedia/video_processing_engine/config.gni") + +ohos_source_set("detailEnh_test_utils") { + sources = [ + "detailEnh_sample.cpp" + ] + + include_dirs = [ + "$INTERFACES_INNER_API_DIR", + "$DETAIL_ENHANCER_DIR/include" + ] + + external_deps = [ + "c_utils:utils", +# "graphic_surface:surface", + "graphic_surface:surface", + "drivers_interface_display:display_commontype_idl_headers", + ] + + deps = [ + "$FRAMEWORK_DIR:videoprocessingengine" + ] + + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } + + subsystem_name = "multimedia" + part_name = "video_processing_engine" +} \ No newline at end of file diff --git a/test/utils/DetailEnhancer/sample/detailEnh_sample.cpp b/test/utils/DetailEnhancer/sample/detailEnh_sample.cpp new file mode 100644 index 0000000000000000000000000000000000000000..05860dbeffbdd211fb54a7e3641d0b9a563b3255 --- /dev/null +++ b/test/utils/DetailEnhancer/sample/detailEnh_sample.cpp @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "detailEnh" + +#include "detailEnh_sample.h" + +#include +#include +#include +#include +#include + +#include "securec.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +void ReadYuvFile(sptr &buffer, std::unique_ptr &yuvFile, int32_t frameSize) +{ + if (buffer == nullptr) { + TEST_LOG("null ptr"); + return; + } + if (frameSize < 0) { + TEST_LOG("Invalid size"); + return; + } + if (!yuvFile->is_open()) { + TEST_LOG("Yuv file is not open"); + return; + } + yuvFile->read(reinterpret_cast(buffer->GetVirAddr()), frameSize); +} + +sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height) +{ + auto buffer = SurfaceBuffer::Create(); + if (buffer == nullptr) { + TEST_LOG("Create surface buffer failed"); + return nullptr; + } + if (width <= 0 || height <= 0) { + TEST_LOG("Invalid resolution"); + return nullptr; + } + BufferRequestConfig inputCfg; + inputCfg.width = width; + inputCfg.height = height; + inputCfg.strideAlignment = width; + inputCfg.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA; + inputCfg.format = pixelFormat; + inputCfg.timeout = 0; + GSError err = buffer->Alloc(inputCfg); + if (GSERROR_OK != err) { + TEST_LOG("Alloc surface buffer{ %d(%d)x%d format:%d } failed:%d", + inputCfg.width, inputCfg.strideAlignment, inputCfg.height, inputCfg.format, err); + return nullptr; + } + return buffer; +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS diff --git a/test/utils/DetailEnhancer/sample/detailEnh_sample.h b/test/utils/DetailEnhancer/sample/detailEnh_sample.h new file mode 100644 index 0000000000000000000000000000000000000000..dad27d17f57ee9714314e17195193d4353a439af --- /dev/null +++ b/test/utils/DetailEnhancer/sample/detailEnh_sample.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VPE_TEST_DETAIL_ENH_SAMPLE_H +#define VPE_TEST_DETAIL_ENH_SAMPLE_H + +#include "detail_enhancer_image.h" + +#define TEST_LOG(msg, ...) 
printf("%s: %s:%d " msg "\n", LOG_TAG, __FUNCTION__, __LINE__, ##__VA_ARGS__) + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +void ReadYuvFile(sptr &buffer, std::unique_ptr &yuvFile, int32_t frameSize); +sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height); +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // VPE_TEST_CSC_SAMPLE_H diff --git a/test/utils/DetailEnhancer/sample/detailEnh_sample_define.h b/test/utils/DetailEnhancer/sample/detailEnh_sample_define.h new file mode 100644 index 0000000000000000000000000000000000000000..d5dd7e36e863fc73117f09153019902846ff3be3 --- /dev/null +++ b/test/utils/DetailEnhancer/sample/detailEnh_sample_define.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VPE_TEST_DETAIL_ENH_SAMPLE_DEFINE_H +#define VPE_TEST_DETAIL_ENH_SAMPLE_DEFINE_H + +#include "detail_enhancer_image.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +inline constexpr std::string_view NV12_FILE = "/data/test/media/nv12Image_4096x3072_1.yuv"; +inline constexpr std::string_view I420_FILE = "/data/test/media/yuv420Image_4096x3072_1.yuv"; +inline constexpr std::string_view RGBA_FILE = "/data/test/media/rgbaImage_4096x3072_1.yuv"; +inline constexpr std::string_view BGRA_FILE = "/data/test/media/bgraImage_4096x3072_1.yuv"; +inline constexpr int32_t WIDTH = 1024; +inline constexpr int32_t HEIGHT = 1024; +inline constexpr int32_t MIN_WIDTH = 100; +inline constexpr int32_t MIN_HEIGHT = 100; + +enum SUPPORT_FORMAT { + YUV400 = 0, + YVU420, + YUV422, + YUV444, + RGB, + RGBA, + NV12, + I420, + BGRA, + RGBA1010102 +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS +#endif // VPE_TEST_CSC_SAMPLE_DEFINE_H \ No newline at end of file