diff --git a/README.md b/README.md index 2ea58a8bee981f175229719b624103bd51c5f916..9df72f725e7bdf785c3112d9df927b8550782ed0 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# VPE引擎 +# VPE引擎(multimedia_video_processing_engine) ## 简介 VPE(Video Processing Engine)引擎是处理视频和图像数据的媒体引擎,包括细节增强、对比度增强、亮度增强、动态范围增强等基础能力,为转码、分享、显示后处理等提供色彩空间转换、缩放超分、动态元数据集生成等基础算法。 diff --git a/README_EN.md b/README_EN.md index d300dc5b4530d934425d6d6af1c85edac6d47228..2a3cfff8745ab91f5978e494b7b206c18dae6157 100644 --- a/README_EN.md +++ b/README_EN.md @@ -1,4 +1,4 @@ -# VPE +# VPE(multimedia_video_processing_engine) ## Overview The Video Processing Engine (VPE) is a media engine for processing video and image data. It offers a range of fundamental capabilities including enhancements to details, contrast, luminance, and dynamic ranges. It also supports essential algorithms for color space conversion, scaling and upscaling, and dynamic metadata generation for transcoding, sharing, and post-processing for display. diff --git a/bundle.json b/bundle.json index ce185fea03304489583a86eedcfaa0df55678117..0334394b603124adfa446da0d7d731208eab55de 100644 --- a/bundle.json +++ b/bundle.json @@ -85,22 +85,24 @@ }, { "type": "so", - "name": "//foundation/multimedia/video_processing_engine/framework:image_processing_capi_impl", - "header":{ - "header_files":[ - "image_processing_capi_impl.h" + "name": "//foundation/multimedia/video_processing_engine/framework:image_processing", + "header": { + "header_files": [ + "image_processing.h", + "image_processing_types.h" ], - "header_base": "//foundation/multimedia/video_processing_engine/framework/capi/image_processing/include" + "header_base": "//foundation/multimedia/video_processing_engine/interfaces/kits/c" } }, { "type": "so", - "name": "//foundation/multimedia/video_processing_engine/framework:video_processing_capi_impl", - "header":{ - "header_files":[ - "video_processing_capi_impl.h" + "name": "//foundation/multimedia/video_processing_engine/framework:video_processing", + "header": { + "header_files": [ + "video_processing.h", + "video_processing_types.h" ], - "header_base": "//foundation/multimedia/video_processing_engine/framework/capi/video_processing/include" + "header_base": "//foundation/multimedia/video_processing_engine/interfaces/kits/c" } }, { diff --git a/config.gni b/config.gni index 671cd55bab03c126385a8f38f187cb6fb1eaf17d..943ea6c647fac8ff29ce16c038d9f8d6efb0c17b 100644 --- a/config.gni +++ b/config.gni @@ -88,3 +88,9 @@ VIDEO_PROCESSING_ENGINE_CFLAGS = [ "-Wsign-compare", "-Wunused-parameter" ] + +if (defined(global_parts_info) && defined(global_parts_info.third_party_skia)) { + has_skia = true +} else { + has_skia = false +} \ No newline at end of file diff --git a/framework/BUILD.gn b/framework/BUILD.gn index 5b6b5b7984d63637921ad40be3b8e8b8abc6f8ff..99ed9fa21b5b7483fd553494e88d0c830b9b4d7f 100644 --- a/framework/BUILD.gn +++ b/framework/BUILD.gn @@ -12,9 +12,8 @@ # limitations under the License. 
import("//build/ohos.gni") -import("//foundation/ability/idl_tool/idl_config.gni") +import("//build/config/components/idl_tool/idl.gni") import("//foundation/multimedia/video_processing_engine/config.gni") -import("$CAPI_DIR/config.gni") config("export_config") { include_dirs = [ @@ -95,6 +94,8 @@ config("video_process_config") { "$SKIA_DIR/third_party/externals/angle2/src/common", "$SKIA_DIR/third_party/externals/angle2/src/", "$SKIA_DIR/third_party/externals/angle2/src/common/third_party/base/", + "//foundation/graphic/graphic_2d/utils/color_manager/export", + "//base/startup/init/interfaces/innerkits/include/syspara/", ] } @@ -192,12 +193,13 @@ ohos_shared_library("videoprocessingengine") { "$ALGORITHM_COMMON_DIR/image_openclsetup.cpp" ] - deps = [ + deps = ["//third_party/skia:skia_ohos", "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice_interface", ":aihdr_engine", ":ai_super_resolution", ":extream_vision_engine", "//third_party/opencl-headers:libcl", + "//third_party/bounds_checking_function:libsec_static", ] external_deps = [ @@ -227,14 +229,38 @@ ohos_shared_library("videoprocessingengine") { part_name = "video_processing_engine" } -config("video_processing_engine_capi_config") { +config("vpe_capi_config") { + cflags = [ + "-fno-exceptions", + "-Wall", + "-fno-common", + "-fstack-protector-all", + "-Wshadow", + "-FPIC", + "-FS", + "-O2", + "-D_FORTIFY_SOURCE=2", + "-Wformat=2", + "-Wdate-time", + ] + + cflags_cc = [ + "-std=c++17", + "-fno-rtti", + ] +} + +config("vpe_capi_public_config") { include_dirs = [ - "$VIDEO_PROCESSING_ENGINE_CAPI_DIR/interface/kits/c", - "$VIDEO_PROCESSING_ENGINE_CAPI_DIR/interface/inner_api", + "$INTERFACES_CAPI_DIR", + "$INTERFACES_INNER_API_DIR", ] } -ohos_shared_library("image_processing_capi_impl") { +ohos_shared_library("image_processing") { + stack_protector_ret = true + install_enable = true + sanitize = { boundary_sanitize = true cfi = true @@ -244,19 +270,15 @@ ohos_shared_library("image_processing_capi_impl") { debug = false } - stack_protector_ret = true + configs = [ ":vpe_capi_config" ] + public_configs = [ ":vpe_capi_public_config" ] - cflags = VIDEO_PROCESSING_ENGINE_CFLAGS - cflags += [ + cflags = [ "-ffunction-sections", "-fdata-sections", "-DIMAGE_COLORSPACE_FLAG", ] - ldflags = [ "-Wl,--gc-sections" ] - - configs = [ ":video_processing_engine_capi_config" ] - include_dirs = [ #Interface "$INTERFACES_CAPI_DIR", @@ -277,17 +299,20 @@ ohos_shared_library("image_processing_capi_impl") { sources = [ "$CAPI_IMAGE_DIR/image_environment_native.cpp", - "$CAPI_IMAGE_DIR/image_processing_capi_impl.cpp", + "$CAPI_IMAGE_DIR/image_processing.cpp", "$CAPI_IMAGE_DIR/image_processing_factory.cpp", "$CAPI_IMAGE_DIR/image_processing_impl.cpp", "$CAPI_IMAGE_DIR/image_processing_native_base.cpp", "$CAPI_IMAGE_DIR/image_processing_utils.cpp", + "$CAPI_IMAGE_DIR/image_processing_capi_capability.cpp", "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/framework/algorithm/common/vpe_utils_common.cpp", "$CAPI_IMAGE_DETAIL_ENHANCER_DIR/detail_enhancer_image_native.cpp", "$CAPI_COLORSPACE_CONVERTER_DIR/colorspace_converter_image_native.cpp", "$CAPI_METADATA_GENERATOR_DIR/metadata_generator_image_native.cpp", ] + defines = [] + deps = [ ":videoprocessingengine", "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice_interface", @@ -295,25 +320,36 @@ ohos_shared_library("image_processing_capi_impl") { external_deps = [ "c_utils:utils", + "drivers_interface_display:display_commontype_idl_headers", "graphic_surface:surface", - "hilog:libhilog", 
"graphic_2d:2d_graphics", - "media_foundation:media_foundation", - "image_framework:pixelmap", + "hilog:libhilog", + "hitrace:hitrace_meter", "image_framework:image_native", - "drivers_interface_display:display_commontype_idl_headers", + "image_framework:pixelmap", "ipc:ipc_single", + "media_foundation:media_foundation", "safwk:system_ability_fwk", "samgr:samgr_proxy", ] + if (has_skia) { + defines += [ "SKIA_ENABLE" ] + external_deps += [ "skia:skia_canvaskit" ] + include_dirs += [ "$ALGORITHM_EXTENSION_MANAGER_DIR/include" ] + sources += [ "$ALGORITHM_EXTENSION_MANAGER_DIR/skia_impl.cpp" ] + } + + innerapi_tags = [ "ndk" ] output_extension = "so" subsystem_name = "multimedia" - innerapi_tags = [ "ndk" ] part_name = "video_processing_engine" } -ohos_shared_library("video_processing_capi_impl") { +ohos_shared_library("video_processing") { + stack_protector_ret = true + install_enable = true + sanitize = { boundary_sanitize = true cfi = true @@ -323,17 +359,8 @@ ohos_shared_library("video_processing_capi_impl") { debug = false } - stack_protector_ret = true - - cflags = VIDEO_PROCESSING_ENGINE_CFLAGS - cflags += [ - "-ffunction-sections", - "-fdata-sections", - ] - - ldflags = [ "-Wl,--gc-sections" ] - - configs = [ ":video_processing_engine_capi_config" ] + configs = [ ":vpe_capi_config" ] + public_configs = [ ":vpe_capi_public_config" ] include_dirs = [ #Interface @@ -343,13 +370,13 @@ ohos_shared_library("video_processing_capi_impl") { "$DFX_DIR/include", "$ALGORITHM_COMMON_DIR/include", #CAPI implementation - "$CAPI_VIDEO_DIR/include/", - "$CAPI_VIDEO_DIR/aihdr_enhancer/include/", - "$CAPI_VIDEO_DETAIL_ENHANCER_DIR/include/", + "$CAPI_VIDEO_DIR/include", + "$CAPI_VIDEO_DIR/aihdr_enhancer/include", + "$CAPI_VIDEO_DETAIL_ENHANCER_DIR/include", "$CAPI_VIDEO_COLORSPACE_CONVERTER_DIR/include/", "$CAPI_VIDEO_METADATA_GENERATOR_DIR/include/", "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", - "${target_gen_dir}/../services/", + "${target_gen_dir}/../services/", ] sources = [ @@ -357,7 +384,7 @@ ohos_shared_library("video_processing_capi_impl") { "$CAPI_VIDEO_DIR/video_environment_native.cpp", "$CAPI_VIDEO_DIR/video_processing_callback_impl.cpp", "$CAPI_VIDEO_DIR/video_processing_callback_native.cpp", - "$CAPI_VIDEO_DIR/video_processing_capi_impl.cpp", + "$CAPI_VIDEO_DIR/video_processing.cpp", "$CAPI_VIDEO_DIR/video_processing_capi_capability.cpp", "$CAPI_VIDEO_DIR/video_processing_factory.cpp", "$CAPI_VIDEO_DIR/video_processing_impl.cpp", @@ -371,6 +398,8 @@ ohos_shared_library("video_processing_capi_impl") { "$CAPI_VIDEO_DIR/aihdr_enhancer/aihdr_enhancer_video_native.cpp" ] + defines = [] + deps = [ ":videoprocessingengine", "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services:videoprocessingservice_interface", @@ -381,18 +410,21 @@ ohos_shared_library("video_processing_capi_impl") { "graphic_surface:surface", "hilog:libhilog", "hitrace:hitrace_meter", + "ipc:ipc_single", "media_foundation:media_foundation", "drivers_interface_display:display_commontype_idl_headers", - "graphic_2d:libgraphic_utils", - "graphic_2d:librender_service_client", - "ipc:ipc_single", - "safwk:system_ability_fwk", - "samgr:samgr_proxy", ] + if (has_skia) { + defines += [ "SKIA_ENABLE" ] + external_deps += [ "skia:skia_canvaskit" ] + include_dirs += [ "$$ALGORITHM_EXTENSION_MANAGER_DIR/include" ] + sources += [ "$ALGORITHM_EXTENSION_MANAGER_DIR/skia_impl.cpp" ] + } + + innerapi_tags = [ "ndk" ] output_extension = "so" subsystem_name = "multimedia" - innerapi_tags = [ "ndk" ] part_name = "video_processing_engine" } @@ 
-408,12 +440,16 @@ ohos_shared_library("detailEnhancer") { include_dirs = [ "$DFX_DIR/include", + "//foundation/multimedia/media_foundation/interface/kits/c", + "//foundation/multimedia/image_framework/interfaces/kits/native/include/image", + "//foundation/multimedia/image_framework/frameworks/kits/js/common/ndk/include", "//foundation/multimedia/video_processing_engine/interfaces/kits/js", "//foundation/multimedia/video_processing_engine/interfaces/inner_api", "//foundation/multimedia/video_processing_engine/framework/capi/image_processing/include/", ] sources = [ "//foundation/multimedia/video_processing_engine/framework/capi/image_processing/detail_enhance_napi.cpp", + "//foundation/multimedia/image_framework/frameworks/innerkitsimpl/common/src/memory_manager.cpp", ] deps = [ @@ -428,7 +464,6 @@ ohos_shared_library("detailEnhancer") { "hitrace:hitrace_meter", "ipc:ipc_napi", "media_foundation:native_media_core", - "media_foundation:media_foundation", "napi:ace_napi", "image_framework:image_utils", "image_framework:image_native", @@ -459,6 +494,7 @@ ohos_shared_library("videoprocessingenginenapi") { "$INTERFACES_INNER_API_DIR", "$CAPI_DIR/image_processing/include/", "$ALGORITHM_COMMON_DIR/include/", + "$INTERFACES_CAPI_DIR", ] sources = [ "$CAPI_DIR/image_processing/detail_enhance_napi_formal.cpp", @@ -483,7 +519,6 @@ ohos_shared_library("videoprocessingenginenapi") { "ipc:ipc_napi", "media_foundation:native_media_core", "media_foundation:media_foundation", - "media_foundation:image_processing", "napi:ace_napi", ] diff --git a/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp b/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp index aa024898964a2e0994efcc9e82d110d4d15e2b82..b6c269a7938dfce64b5532fc3834370d5e54200b 100644 --- a/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp +++ b/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp @@ -45,7 +45,7 @@ void ColorSpaceConverterFwk::OpenCLInit() } else { constexpr int DEVICE_NAME_LENGTH = 32; // 32 max name length char deviceName[DEVICE_NAME_LENGTH]; - auto status = SetupOpencl(&OpenclFoundationHandle, "HUA", deviceName); + auto status = SetupOpencl(&OpenclFoundationHandle, "HUAWEI", deviceName); if (status != static_cast(CL_SUCCESS)) { VPE_LOGE("%{public}s, Error: setupOpencl status=%{public}d\n", __FUNCTION__, status); } diff --git a/framework/algorithm/common/include/algorithm_types.h b/framework/algorithm/common/include/algorithm_types.h new file mode 100644 index 0000000000000000000000000000000000000000..95105d1463601c668dd94d3e920a3ea3a2907131 --- /dev/null +++ b/framework/algorithm/common/include/algorithm_types.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ALGORITHM_TYPES_H +#define ALGORITHM_TYPES_H + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +enum AlgoErrorCode { + ALGO_SUCCESS, + ALGO_ERROR_INVALID_PARAMETER, + ALGO_ERROR_UNKNOWN, + ALGO_ERROR_INITIALIZE_FAILED, + ALGO_ERROR_CREATE_FAILED, + ALGO_ERROR_PROCESS_FAILED, + ALGO_ERROR_UNSUPPORTED_PROCESSING, + ALGO_ERROR_OPERATION_NOT_PERMITTED, + ALGO_ERROR_NO_MEMORY, + ALGO_ERROR_INVALID_INSTANCE, + ALGO_ERROR_INVALID_VALUE +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // ALGORITHM_TYPES_H diff --git a/framework/algorithm/extensions/skia/include/skia_impl.h b/framework/algorithm/extensions/skia/include/skia_impl.h new file mode 100644 index 0000000000000000000000000000000000000000..49434888b1895126f97bf3b32c54218637e9083c --- /dev/null +++ b/framework/algorithm/extensions/skia/include/skia_impl.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SKIA_IMPL_H +#define SKIA_IMPL_H + +#include "surface_buffer.h" +#include "include/core/SkYUVAPixmaps.h" + +#include "algorithm_types.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class Skia { +public: + static AlgoErrorCode Process(const sptr& input, sptr& output); + +private: + Skia() = default; + virtual ~Skia() = default; + Skia(const Skia&) = delete; + Skia& operator=(const Skia&) = delete; + Skia(Skia&&) = delete; + Skia& operator=(Skia&&) = delete; +}; +} // VideoProcessingEngine +} // Media +} // OHOS + +#endif // SKIA_IMPL_H \ No newline at end of file diff --git a/framework/algorithm/extensions/skia/skia_impl.cpp b/framework/algorithm/extensions/skia/skia_impl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c83467aaf9926d1156ddfeeaf3f59494f820b4fc --- /dev/null +++ b/framework/algorithm/extensions/skia/skia_impl.cpp @@ -0,0 +1,242 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "skia_impl.h" + +#include +#include +#include + +#include "vpe_log.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +namespace { +enum ImageFormatType { + IMAGE_FORMAT_TYPE_UNKNOWN = 0, + IMAGE_FORMAT_TYPE_RGB, + IMAGE_FORMAT_TYPE_YUV, +}; + +constexpr int CHANNEL_Y = 0; +constexpr int CHANNEL_UV1 = 1; +constexpr int CHANNEL_UV2 = 2; +constexpr int CHANNEL_NUM_NV12_NV21 = 2; +constexpr int CHANNEL_NUM_YU12_YV12 = 3; + +ImageFormatType GetImageType(const sptr& input, const sptr& output) +{ + if (input->GetFormat() != output->GetFormat()) { + VPE_LOGE("Different format for input and output!"); + return IMAGE_FORMAT_TYPE_UNKNOWN; + } + VPE_LOGD("Input format: %{public}d, output format: %{public}d.", input->GetFormat(), output->GetFormat()); + ImageFormatType imageType = IMAGE_FORMAT_TYPE_UNKNOWN; + switch (input->GetFormat()) { + case OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888: + case OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888: + imageType = IMAGE_FORMAT_TYPE_RGB; + break; + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP: // NV12 + case OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP: // NV21 + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P: // YU12 + case OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P: // YV12 + imageType = IMAGE_FORMAT_TYPE_YUV; + break; + default: + VPE_LOGE("Default unknown type."); + break; + } + return imageType; +} + +SkColorType GetRGBImageFormat(const sptr& surfaceBuffer) +{ + SkColorType imageFormat; + switch (surfaceBuffer->GetFormat()) { + case OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888: + imageFormat = kRGBA_8888_SkColorType; + break; + case OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888: + imageFormat = kBGRA_8888_SkColorType; + break; + default: + imageFormat = kRGBA_8888_SkColorType; + break; + } + return imageFormat; +} + +AlgoErrorCode PixmapScale(const SkPixmap& inputPixmap, SkPixmap& outputPixmap, SkSamplingOptions options) +{ + if (!inputPixmap.scalePixels(outputPixmap, options)) { + return ALGO_ERROR_PROCESS_FAILED; + } + return ALGO_SUCCESS; +} + +AlgoErrorCode RGBScale(const sptr& input, sptr& output) +{ + if (input->GetWidth() <= 0 || input->GetHeight() <= 0 || output->GetWidth() <= 0 || output->GetHeight() <= 0) { + VPE_LOGE("Invalid input or output size!"); + return ALGO_ERROR_INVALID_VALUE; + } + SkImageInfo inputInfo = SkImageInfo::Make(input->GetWidth(), input->GetHeight(), GetRGBImageFormat(input), + kPremul_SkAlphaType); + SkImageInfo outputInfo = SkImageInfo::Make(output->GetWidth(), output->GetHeight(), GetRGBImageFormat(output), + kPremul_SkAlphaType); + SkPixmap inputPixmap(inputInfo, input->GetVirAddr(), input->GetStride()); + SkPixmap outputPixmap(outputInfo, output->GetVirAddr(), output->GetStride()); + + SkSamplingOptions scaleOption(SkFilterMode::kNearest); + return PixmapScale(inputPixmap, outputPixmap, scaleOption); +} + +int ConfigYUVFormat(const sptr& buffer, SkYUVAInfo::PlaneConfig& planeConfig, size_t* rowbyte, + unsigned char** pixmapAddr) +{ + int numPlanes; + void* planesInfoPtr = nullptr; + buffer->GetPlanesInfo(&planesInfoPtr); + auto planesInfo = static_cast(planesInfoPtr); + switch (buffer->GetFormat()) { + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP: // NV12 + planeConfig = SkYUVAInfo::PlaneConfig::kY_UV; + rowbyte[CHANNEL_UV1] = planesInfo->planes[CHANNEL_UV1].columnStride; + pixmapAddr[CHANNEL_UV1] = pixmapAddr[CHANNEL_Y] + + static_cast(planesInfo->planes[CHANNEL_UV1].offset); + numPlanes = CHANNEL_NUM_NV12_NV21; + break; + case OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP: // NV21 + planeConfig = SkYUVAInfo::PlaneConfig::kY_VU; + 
rowbyte[CHANNEL_UV1] = planesInfo->planes[CHANNEL_UV2].columnStride; + pixmapAddr[CHANNEL_UV1] = pixmapAddr[CHANNEL_Y] + + static_cast(planesInfo->planes[CHANNEL_UV2].offset); + numPlanes = CHANNEL_NUM_NV12_NV21; + break; + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P: // YU12 + planeConfig = SkYUVAInfo::PlaneConfig::kY_U_V; + rowbyte[CHANNEL_UV1] = planesInfo->planes[CHANNEL_UV1].columnStride; + rowbyte[CHANNEL_UV2] = planesInfo->planes[CHANNEL_UV2].columnStride; + pixmapAddr[CHANNEL_UV1] = pixmapAddr[CHANNEL_Y] + + static_cast(planesInfo->planes[CHANNEL_UV1].offset); + pixmapAddr[CHANNEL_UV2] = pixmapAddr[CHANNEL_Y] + + static_cast(planesInfo->planes[CHANNEL_UV2].offset); + numPlanes = CHANNEL_NUM_YU12_YV12; + break; + case OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P: // YV12 + planeConfig = SkYUVAInfo::PlaneConfig::kY_V_U; + rowbyte[CHANNEL_UV1] = planesInfo->planes[CHANNEL_UV2].columnStride; + rowbyte[CHANNEL_UV2] = planesInfo->planes[CHANNEL_UV1].columnStride; + pixmapAddr[CHANNEL_UV1] = pixmapAddr[CHANNEL_Y] + + static_cast(planesInfo->planes[CHANNEL_UV2].offset); + pixmapAddr[CHANNEL_UV2] = pixmapAddr[CHANNEL_Y] + + static_cast(planesInfo->planes[CHANNEL_UV1].offset); + numPlanes = CHANNEL_NUM_YU12_YV12; + break; + default: + VPE_LOGD("YUVFormat: default NV12."); + planeConfig = SkYUVAInfo::PlaneConfig::kY_UV; + rowbyte[CHANNEL_UV1] = planesInfo->planes[CHANNEL_UV1].columnStride; + numPlanes = CHANNEL_NUM_NV12_NV21; + break; + } + return numPlanes; +} + +int CreateYUVPixmap(const sptr& buffer, std::array& pixmaps) +{ + SkISize imageSize; + imageSize.fWidth = buffer->GetWidth(); + if (imageSize.fWidth <= 0) { + VPE_LOGE("Invalid width!"); + return 0; + } + imageSize.fHeight = buffer->GetHeight(); + if (imageSize.fHeight <= 0) { + VPE_LOGE("Invalid height!"); + return 0; + } + SkYUVColorSpace yuvColorSpace = SkYUVColorSpace::kRec709_Full_SkYUVColorSpace; + SkYUVAInfo::Subsampling subsampling = SkYUVAInfo::Subsampling::k420; + SkYUVAInfo::PlaneConfig planeConfig = SkYUVAInfo::PlaneConfig::kY_UV; + size_t rowbyte[SkYUVAInfo::kMaxPlanes]; + unsigned char* pixmapAddr[SkYUVAInfo::kMaxPlanes]; + + void* planesInfoPtr = nullptr; + buffer->GetPlanesInfo(&planesInfoPtr); + auto planesInfo = static_cast(planesInfoPtr); + if (planesInfoPtr == nullptr) { + VPE_LOGD("Planes info is nullptr, configure uv stride with general stride."); + for (int i = 0; i < SkYUVAInfo::kMaxPlanes; i++) { + rowbyte[i] = static_cast(buffer->GetStride()); + } + } + + rowbyte[CHANNEL_Y] = planesInfo->planes[CHANNEL_Y].columnStride; + pixmapAddr[CHANNEL_Y] = static_cast(buffer->GetVirAddr()); + int numPlanes = ConfigYUVFormat(buffer, planeConfig, rowbyte, pixmapAddr); + + const SkYUVAInfo yuvInfo = SkYUVAInfo(imageSize, planeConfig, subsampling, yuvColorSpace); + const SkYUVAPixmapInfo pixmapInfo = SkYUVAPixmapInfo(yuvInfo, SkYUVAPixmapInfo::DataType::kUnorm8, rowbyte); + + for (int i = 0; i < numPlanes; i++) { + pixmaps[i].reset(pixmapInfo.planeInfo(i), pixmapAddr[i], rowbyte[i]); + } + return numPlanes; +} + +AlgoErrorCode YUVPixmapScale(const std::array& inputPixmap, + std::array& outputPixmap, SkSamplingOptions opt, int numPlanes) +{ + for (int i = 0; i < numPlanes; i++) { + if (!inputPixmap[i].scalePixels(outputPixmap[i], opt)) { + VPE_LOGE("YUV scale failed!"); + return ALGO_ERROR_PROCESS_FAILED; + } + } + return ALGO_SUCCESS; +} + +AlgoErrorCode YUVScale(const sptr& input, sptr& output) +{ + std::array inputPixmap; + std::array outputPixmap; + int numPlanesInput = CreateYUVPixmap(input, inputPixmap); + int numPlanesOutput = 
CreateYUVPixmap(output, outputPixmap); + if (numPlanesInput != numPlanesOutput || numPlanesInput * numPlanesOutput == 0) { + VPE_LOGE("Wrong YUV settings!"); + return ALGO_ERROR_INVALID_VALUE; + } + SkSamplingOptions scaleOption(SkFilterMode::kNearest); + return YUVPixmapScale(inputPixmap, outputPixmap, scaleOption, numPlanesInput); +} +} + +AlgoErrorCode Skia::Process(const sptr& input, sptr& output) +{ + AlgoErrorCode errCode; + ImageFormatType imageType = GetImageType(input, output); + if (imageType == IMAGE_FORMAT_TYPE_RGB) { + errCode = RGBScale(input, output); + } else if (imageType == IMAGE_FORMAT_TYPE_YUV) { + errCode = YUVScale(input, output); + } else { + VPE_LOGE("Unknown image format!"); + errCode = ALGO_ERROR_INVALID_VALUE; + } + return errCode; +} \ No newline at end of file diff --git a/framework/capi/image_processing/colorspace_converter/colorspace_converter_image_native.cpp b/framework/capi/image_processing/colorspace_converter/colorspace_converter_image_native.cpp index b5eea56587dd9010764401f83100498037dbc5d9..0e67e90832a47538974e793de0d088ff1121a902 100644 --- a/framework/capi/image_processing/colorspace_converter/colorspace_converter_image_native.cpp +++ b/framework/capi/image_processing/colorspace_converter/colorspace_converter_image_native.cpp @@ -17,7 +17,7 @@ #include -#include "image_processing_capi_impl.h" +#include "image_processing_capi_capability.h" #include "detail_enhancer_common.h" #include "detail_enhancer_image_fwk.h" #include "image_processing_utils.h" diff --git a/framework/capi/image_processing/image_processing.cpp b/framework/capi/image_processing/image_processing.cpp new file mode 100644 index 0000000000000000000000000000000000000000..72abce22da5c364580e8e8372b99c286ebda381d --- /dev/null +++ b/framework/capi/image_processing/image_processing.cpp @@ -0,0 +1,174 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "image_processing.h" + +#include +#include + +#include "vpe_log.h" +#include "image_processing_capi_capability.h" +#include "image_environment_native.h" +#include "image_processing_impl.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +const int32_t IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION = 0x1; +const int32_t IMAGE_PROCESSING_TYPE_COMPOSITION = 0x2; +const int32_t IMAGE_PROCESSING_TYPE_DECOMPOSITION = 0x4; +const int32_t IMAGE_PROCESSING_TYPE_METADATA_GENERATION = 0x8; +const int32_t IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER = 0x10; +const char* IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL = "QualityLevel"; + +namespace { +ImageProcessing_ErrorCode CallImageProcessing(OH_ImageProcessing* imageProcessor, + std::function&)>&& operation) +{ + CHECK_AND_RETURN_RET_LOG(imageProcessor != nullptr, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, + "imageProcessor is null!"); + auto imageProcessing = imageProcessor->GetImageProcessing(); + CHECK_AND_RETURN_RET_LOG(imageProcessing != nullptr, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, + "imageProcessor is invalid!"); + return operation(imageProcessing); +} +} + +ImageProcessing_ErrorCode OH_ImageProcessing_InitializeEnvironment(void) +{ + CHECK_AND_RETURN_RET_LOG(ImageProcessingCapiCapability::Get().OpenCLInit() == IMAGE_PROCESSING_SUCCESS, + IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, "OpenCLInit failed!"); + CHECK_AND_RETURN_RET_LOG(ImageProcessingCapiCapability::Get().OpenGLInit() == IMAGE_PROCESSING_SUCCESS, + IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, "OpenGLInit failed!"); + return ImageEnvironmentNative::Get().Initialize(); +} + +ImageProcessing_ErrorCode OH_ImageProcessing_DeinitializeEnvironment(void) +{ + return ImageEnvironmentNative::Get().Deinitialize(); +} + +bool OH_ImageProcessing_IsColorSpaceConversionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo) +{ + ImageProcessingCapiCapability::Get().LoadLibrary(); + auto flag = ImageProcessingCapiCapability::Get().CheckColorSpaceConversionSupport(sourceImageInfo, + destinationImageInfo); + ImageProcessingCapiCapability::Get().UnloadLibrary(); + return flag; +} + +bool OH_ImageProcessing_IsCompositionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* sourceGainmapInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo) +{ + ImageProcessingCapiCapability::Get().LoadLibrary(); + auto flag = ImageProcessingCapiCapability::Get().CheckCompositionSupport(sourceImageInfo, + sourceGainmapInfo, destinationImageInfo); + ImageProcessingCapiCapability::Get().UnloadLibrary(); + return flag; +} + +bool OH_ImageProcessing_IsDecompositionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationGainmapInfo) +{ + ImageProcessingCapiCapability::Get().LoadLibrary(); + auto flag = ImageProcessingCapiCapability::Get().CheckDecompositionSupport(sourceImageInfo, + destinationImageInfo, destinationGainmapInfo); + ImageProcessingCapiCapability::Get().UnloadLibrary(); + return flag; +} + +bool OH_ImageProcessing_IsMetadataGenerationSupported(const ImageProcessing_ColorSpaceInfo* sourceImageInfo) +{ + ImageProcessingCapiCapability::Get().LoadLibrary(); + auto flag = ImageProcessingCapiCapability::Get().CheckMetadataGenerationSupport(sourceImageInfo); + ImageProcessingCapiCapability::Get().UnloadLibrary(); + return flag; 
+} + +ImageProcessing_ErrorCode OH_ImageProcessing_Create(OH_ImageProcessing** imageProcessor, int type) +{ + return OH_ImageProcessing::Create(imageProcessor, type, ImageProcessingCapiCapability::Get().GetOpenGLContext(), + ImageProcessingCapiCapability::Get().GetClContext()); +} + +ImageProcessing_ErrorCode OH_ImageProcessing_Destroy(OH_ImageProcessing* imageProcessor) +{ + return OH_ImageProcessing::Destroy(imageProcessor); +} + +ImageProcessing_ErrorCode OH_ImageProcessing_SetParameter(OH_ImageProcessing* imageProcessor, + const OH_AVFormat* parameter) +{ + return CallImageProcessing(imageProcessor, [¶meter](std::shared_ptr& obj) { + return obj->SetParameter(parameter); + }); +} + +ImageProcessing_ErrorCode OH_ImageProcessing_GetParameter(OH_ImageProcessing* imageProcessor, OH_AVFormat* parameter) +{ + return CallImageProcessing(imageProcessor, [¶meter](std::shared_ptr& obj) { + return obj->GetParameter(parameter); + }); +} + +ImageProcessing_ErrorCode OH_ImageProcessing_ConvertColorSpace(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage) +{ + return CallImageProcessing(imageProcessor, [&sourceImage, &destinationImage]( + std::shared_ptr& obj) { + return obj->ConvertColorSpace(sourceImage, destinationImage); + }); +} + +ImageProcessing_ErrorCode OH_ImageProcessing_Compose(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage, OH_PixelmapNative* sourceGainmap, OH_PixelmapNative* destinationImage) +{ + return CallImageProcessing(imageProcessor, [&sourceImage, &sourceGainmap, &destinationImage]( + std::shared_ptr& obj) { + return obj->Compose(sourceImage, sourceGainmap, destinationImage); + }); +} + +ImageProcessing_ErrorCode OH_ImageProcessing_Decompose(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage, OH_PixelmapNative* destinationGainmap) +{ + return CallImageProcessing(imageProcessor, [&sourceImage, &destinationImage, &destinationGainmap]( + std::shared_ptr& obj) { + return obj->Decompose(sourceImage, destinationImage, destinationGainmap); + }); +} + +ImageProcessing_ErrorCode OH_ImageProcessing_GenerateMetadata(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage) +{ + return CallImageProcessing(imageProcessor, [&sourceImage]( + std::shared_ptr& obj) { + return obj->GenerateMetadata(sourceImage); + }); +} + +ImageProcessing_ErrorCode OH_ImageProcessing_EnhanceDetail(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage) +{ + return CallImageProcessing(imageProcessor, [&sourceImage, &destinationImage]( + std::shared_ptr& obj) { + return obj->Process(sourceImage, destinationImage); + }); +} diff --git a/framework/capi/image_processing/image_processing_capi_impl.cpp b/framework/capi/image_processing/image_processing_capi_capability.cpp similarity index 63% rename from framework/capi/image_processing/image_processing_capi_impl.cpp rename to framework/capi/image_processing/image_processing_capi_capability.cpp index f60b6f145ac90b4a17d7232897557560b7fa10ea..3731647b64f34f8c84add5ea90dead085fe5c252 100644 --- a/framework/capi/image_processing/image_processing_capi_impl.cpp +++ b/framework/capi/image_processing/image_processing_capi_capability.cpp @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,36 +13,17 @@ * limitations under the License. */ -#include "image_processing_capi_impl.h" - -#include "vpe_log.h" - -#include "image_environment_native.h" -#include "image_processing_impl.h" +#include "image_processing_capi_capability.h" using namespace OHOS::Media::VideoProcessingEngine; -const int32_t IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION = 0x1; -const int32_t IMAGE_PROCESSING_TYPE_COMPOSITION = 0x2; -const int32_t IMAGE_PROCESSING_TYPE_DECOMPOSITION = 0x4; -const int32_t IMAGE_PROCESSING_TYPE_METADATA_GENERATION = 0x8; -const int32_t IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER = 0x10; -const char* IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL = "QualityLevel"; - -namespace { -ImageProcessing_ErrorCode CallImageProcessing(OH_ImageProcessing* imageProcessor, - std::function&)>&& operation) +ImageProcessingCapiCapability& ImageProcessingCapiCapability::Get() { - CHECK_AND_RETURN_RET_LOG(imageProcessor != nullptr, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, - "imageProcessor is null!"); - auto imageProcessing = imageProcessor->GetImageProcessing(); - CHECK_AND_RETURN_RET_LOG(imageProcessing != nullptr, IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, - "imageProcessor is invalid!"); - return operation(imageProcessing); -} + static ImageProcessingCapiCapability instance{}; + return instance; } -ImageProcessing_ErrorCode ImageProcessingCapiImpl::OpenCLInit() +ImageProcessing_ErrorCode ImageProcessingCapiCapability::OpenCLInit() { void *OpenclFoundationHandle = nullptr; std::string path = "/sys_prod/lib64/VideoProcessingEngine/libaihdr_engine.so"; @@ -52,7 +33,7 @@ ImageProcessing_ErrorCode ImageProcessingCapiImpl::OpenCLInit() } else { constexpr int DEVICE_NAME_LENGTH = 32; // 32 max name length char deviceName[DEVICE_NAME_LENGTH]; - auto status = SetupOpencl(&OpenclFoundationHandle, "HUA", deviceName); + auto status = SetupOpencl(&OpenclFoundationHandle, "HUAWEI", deviceName); CHECK_AND_RETURN_RET_LOG(status == static_cast(IMAGE_PROCESSING_SUCCESS), IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, "GetOpenCLContext SetupOpencl fail!"); @@ -61,7 +42,7 @@ ImageProcessing_ErrorCode ImageProcessingCapiImpl::OpenCLInit() return IMAGE_PROCESSING_SUCCESS; } -ImageProcessing_ErrorCode ImageProcessingCapiImpl::OpenGLInit() +ImageProcessing_ErrorCode ImageProcessingCapiCapability::OpenGLInit() { auto status = SetupOpengl(openglContext_); CHECK_AND_RETURN_RET_LOG(status == static_cast(IMAGE_PROCESSING_SUCCESS), @@ -70,22 +51,17 @@ ImageProcessing_ErrorCode ImageProcessingCapiImpl::OpenGLInit() return IMAGE_PROCESSING_SUCCESS; } -ImageProcessing_ErrorCode ImageProcessingCapiImpl::InitializeEnvironment() +ClContext* ImageProcessingCapiCapability::GetClContext() { - CHECK_AND_RETURN_RET_LOG(OpenCLInit() == IMAGE_PROCESSING_SUCCESS, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, - "OpenCLInit failed!"); - CHECK_AND_RETURN_RET_LOG(OpenGLInit() == IMAGE_PROCESSING_SUCCESS, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, - "OpenGLInit failed!"); - - return ImageEnvironmentNative::Get().Initialize(); + return openclContext_; } -ImageProcessing_ErrorCode ImageProcessingCapiImpl::DeinitializeEnvironment() +std::shared_ptr ImageProcessingCapiCapability::GetOpenGLContext() { - return ImageEnvironmentNative::Get().Deinitialize(); + return 
openglContext_; } -void ImageProcessingCapiImpl::LoadLibrary() +void ImageProcessingCapiCapability::LoadLibrary() { std::lock_guard lock(lock_); if (usedInstance_ == 0 && mLibHandle == nullptr) { @@ -94,7 +70,8 @@ void ImageProcessingCapiImpl::LoadLibrary() } usedInstance_++; } -void ImageProcessingCapiImpl::UnloadLibrary() + +void ImageProcessingCapiCapability::UnloadLibrary() { std::lock_guard lock(lock_); usedInstance_--; @@ -104,29 +81,7 @@ void ImageProcessingCapiImpl::UnloadLibrary() } } -ImageProcessing_ErrorCode ImageProcessingCapiImpl::LoadAlgo() -{ - CHECK_AND_RETURN_RET_LOG(mLibHandle != nullptr, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, - "Library is nullptr!"); - std::pair funcs[] = { - { "ImageProcessing_IsColorSpaceConversionSupported", isColorSpaceConversionSupported_}, - { "ImageProcessing_IsCompositionSupported", isCompositionSupported_ }, - { "ImageProcessing_IsDecompositionSupported", isDecompositionSupported_ }, - }; - for (auto& func : funcs) { - func.second = reinterpret_cast(dlsym(mLibHandle, func.first.c_str())); - CHECK_AND_RETURN_RET_LOG(func.second != nullptr, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, - "Failed to locate %s in - %s", func.first.c_str(), dlerror()); - } - isMetadataGenSupported_ = reinterpret_cast(dlsym(mLibHandle, - "ImageProcessing_IsMetadataGenerationSupported")); - CHECK_AND_RETURN_RET_LOG(isMetadataGenSupported_ != nullptr, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, - "Failed to locate %s in - %s", "ImageProcessing_IsMetadataGenerationSupported", - dlerror()); - return IMAGE_PROCESSING_SUCCESS; -} - -bool ImageProcessingCapiImpl::CheckColorSpaceConversionSupport( +bool ImageProcessingCapiCapability::CheckColorSpaceConversionSupport( const ImageProcessing_ColorSpaceInfo* sourceImageInfo, const ImageProcessing_ColorSpaceInfo* destinationImageInfo) { @@ -161,7 +116,7 @@ bool ImageProcessingCapiImpl::CheckColorSpaceConversionSupport( return isColorSpaceConversionSupported_(inputInfo, outputInfo); } -bool ImageProcessingCapiImpl::CheckCompositionSupport( +bool ImageProcessingCapiCapability::CheckCompositionSupport( const ImageProcessing_ColorSpaceInfo* sourceImageInfo, const ImageProcessing_ColorSpaceInfo* sourceGainmapInfo, const ImageProcessing_ColorSpaceInfo* destinationImageInfo) @@ -198,7 +153,7 @@ bool ImageProcessingCapiImpl::CheckCompositionSupport( return isCompositionSupported_(inputInfo, outputInfo); } -bool ImageProcessingCapiImpl::CheckDecompositionSupport( +bool ImageProcessingCapiCapability::CheckDecompositionSupport( const ImageProcessing_ColorSpaceInfo* sourceImageInfo, const ImageProcessing_ColorSpaceInfo* destinationImageInfo, const ImageProcessing_ColorSpaceInfo* destinationGainmapInfo) @@ -235,7 +190,8 @@ bool ImageProcessingCapiImpl::CheckDecompositionSupport( return isDecompositionSupported_(inputInfo, outputInfo); } -bool ImageProcessingCapiImpl::CheckMetadataGenerationSupport(const ImageProcessing_ColorSpaceInfo* sourceImageInfo) +bool ImageProcessingCapiCapability::CheckMetadataGenerationSupport( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo) { CHECK_AND_RETURN_RET_LOG(sourceImageInfo != nullptr, false, "sourceImageInfo is nullptr!"); auto status = LoadAlgo(); @@ -257,126 +213,25 @@ bool ImageProcessingCapiImpl::CheckMetadataGenerationSupport(const ImageProcessi return isMetadataGenSupported_(inputInfo); } -bool ImageProcessingCapiImpl::IsColorSpaceConversionSupported( - const ImageProcessing_ColorSpaceInfo* sourceImageInfo, - const ImageProcessing_ColorSpaceInfo* destinationImageInfo) -{ - 
LoadLibrary(); - auto flag = CheckColorSpaceConversionSupport(sourceImageInfo, destinationImageInfo); - UnloadLibrary(); - return flag; -} - -bool ImageProcessingCapiImpl::IsCompositionSupported( - const ImageProcessing_ColorSpaceInfo* sourceImageInfo, - const ImageProcessing_ColorSpaceInfo* sourceGainmapInfo, - const ImageProcessing_ColorSpaceInfo* destinationImageInfo) -{ - LoadLibrary(); - auto flag = CheckCompositionSupport(sourceImageInfo, sourceGainmapInfo, destinationImageInfo); - UnloadLibrary(); - return flag; -} - -bool ImageProcessingCapiImpl::IsDecompositionSupported( - const ImageProcessing_ColorSpaceInfo* sourceImageInfo, - const ImageProcessing_ColorSpaceInfo* destinationImageInfo, - const ImageProcessing_ColorSpaceInfo* destinationGainmapInfo) -{ - LoadLibrary(); - auto flag = CheckDecompositionSupport(sourceImageInfo, destinationImageInfo, destinationGainmapInfo); - UnloadLibrary(); - return flag; -} - -bool ImageProcessingCapiImpl::IsMetadataGenerationSupported(const ImageProcessing_ColorSpaceInfo* sourceImageInfo) -{ - LoadLibrary(); - auto flag = CheckMetadataGenerationSupport(sourceImageInfo); - - return flag; -} - -ImageProcessing_ErrorCode ImageProcessingCapiImpl::Create(OH_ImageProcessing** imageProcessor, int type) +ImageProcessing_ErrorCode ImageProcessingCapiCapability::LoadAlgo() { - return OH_ImageProcessing::Create(imageProcessor, type, openglContext_, openclContext_); -} - -ImageProcessing_ErrorCode ImageProcessingCapiImpl::Destroy(OH_ImageProcessing* imageProcessor) -{ - return OH_ImageProcessing::Destroy(imageProcessor); -} - -ImageProcessing_ErrorCode ImageProcessingCapiImpl::SetParameter(OH_ImageProcessing* imageProcessor, - const OH_AVFormat* parameter) -{ - return CallImageProcessing(imageProcessor, [¶meter](std::shared_ptr& obj) { - return obj->SetParameter(parameter); - }); -} - -ImageProcessing_ErrorCode ImageProcessingCapiImpl::GetParameter(OH_ImageProcessing* imageProcessor, - OH_AVFormat* parameter) -{ - return CallImageProcessing(imageProcessor, [¶meter](std::shared_ptr& obj) { - return obj->GetParameter(parameter); - }); -} - -ImageProcessing_ErrorCode ImageProcessingCapiImpl::ConvertColorSpace(OH_ImageProcessing* imageProcessor, - OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage) -{ - return CallImageProcessing(imageProcessor, [&sourceImage, &destinationImage]( - std::shared_ptr& obj) { - return obj->ConvertColorSpace(sourceImage, destinationImage); - }); -} - -ImageProcessing_ErrorCode ImageProcessingCapiImpl::Compose(OH_ImageProcessing* imageProcessor, - OH_PixelmapNative* sourceImage, OH_PixelmapNative* sourceGainmap, OH_PixelmapNative* destinationImage) -{ - return CallImageProcessing(imageProcessor, [&sourceImage, &sourceGainmap, &destinationImage]( - std::shared_ptr& obj) { - return obj->Compose(sourceImage, sourceGainmap, destinationImage); - }); -} - -ImageProcessing_ErrorCode ImageProcessingCapiImpl::Decompose(OH_ImageProcessing* imageProcessor, - OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage, - OH_PixelmapNative* destinationGainmap) -{ - return CallImageProcessing(imageProcessor, [&sourceImage, &destinationImage, &destinationGainmap]( - std::shared_ptr& obj) { - return obj->Decompose(sourceImage, destinationImage, destinationGainmap); - }); -} - -ImageProcessing_ErrorCode ImageProcessingCapiImpl::GenerateMetadata(OH_ImageProcessing* imageProcessor, - OH_PixelmapNative* sourceImage) -{ - return CallImageProcessing(imageProcessor, [&sourceImage]( - std::shared_ptr& obj) { - return 
obj->GenerateMetadata(sourceImage); - }); -} - -ImageProcessing_ErrorCode ImageProcessingCapiImpl::EnhanceDetail(OH_ImageProcessing* imageProcessor, - OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage) -{ - return CallImageProcessing(imageProcessor, [&sourceImage, &destinationImage]( - std::shared_ptr& obj) { - return obj->Process(sourceImage, destinationImage); - }); -} - -IImageProcessingNdk* CreateImageProcessingNdk() -{ - return new(std::nothrow) ImageProcessingCapiImpl(); + CHECK_AND_RETURN_RET_LOG(mLibHandle != nullptr, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "Library is nullptr!"); + std::pair funcs[] = { + { "ImageProcessing_IsColorSpaceConversionSupported", isColorSpaceConversionSupported_}, + { "ImageProcessing_IsCompositionSupported", isCompositionSupported_ }, + { "ImageProcessing_IsDecompositionSupported", isDecompositionSupported_ }, + }; + for (auto& func : funcs) { + func.second = reinterpret_cast(dlsym(mLibHandle, func.first.c_str())); + CHECK_AND_RETURN_RET_LOG(func.second != nullptr, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "Failed to locate %s in - %s", func.first.c_str(), dlerror()); + } + isMetadataGenSupported_ = reinterpret_cast(dlsym(mLibHandle, + "ImageProcessing_IsMetadataGenerationSupported")); + CHECK_AND_RETURN_RET_LOG(isMetadataGenSupported_ != nullptr, IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + "Failed to locate %s in - %s", "ImageProcessing_IsMetadataGenerationSupported", + dlerror()); + return IMAGE_PROCESSING_SUCCESS; } -void DestroyImageProcessingNdk(IImageProcessingNdk* obj) -{ - CHECK_AND_RETURN_LOG(obj != nullptr, "VPE image processing is null!"); - ImageProcessingCapiImpl* impl = static_cast(obj); - delete impl; -} diff --git a/framework/capi/image_processing/include/image_processing_capi_impl.h b/framework/capi/image_processing/include/image_processing_capi_capability.h similarity index 62% rename from framework/capi/image_processing/include/image_processing_capi_impl.h rename to framework/capi/image_processing/include/image_processing_capi_capability.h index 91c8d7f5062e0a24f7240b1cd04a26eb5fb2f2b2..9c7fe7f06422da962125e583e5a43a5dfb9bda8c 100644 --- a/framework/capi/image_processing/include/image_processing_capi_impl.h +++ b/framework/capi/image_processing/include/image_processing_capi_capability.h @@ -13,26 +13,23 @@ * limitations under the License. 
*/ -#ifndef IMAGE_PROCESSING_CAPI_IMPL_H -#define IMAGE_PROCESSING_CAPI_IMPL_H +#ifndef IMAGE_PROCESSING_CAPI_CAPABILITY_H +#define IMAGE_PROCESSING_CAPI_CAPABILITY_H -#include "image_processing_capi_interface.h" -#include "frame_info.h" -#include #include +#include #include #include +#include +#include "algorithm_common.h" +#include "frame_info.h" #include "image_processing_native_template.h" #include "image_processing_types.h" #include "pixelmap_native_impl.h" - #include "detail_enhancer_image.h" #include "colorspace_converter.h" #include "colorspace_converter_image_native.h" - -#include - #include "detail_enhancer_common.h" #include "detail_enhancer_image_fwk.h" #include "image_processing_utils.h" @@ -41,6 +38,10 @@ #include "surface_type.h" #include "vpe_log.h" +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + const std::map IMAGE_FORMAT_MAP = { { OHOS::Media::PixelFormat::RGBA_8888, OHOS::GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888 }, { OHOS::Media::PixelFormat::BGRA_8888, OHOS::GraphicPixelFormat::GRAPHIC_PIXEL_FMT_BGRA_8888 }, @@ -48,12 +49,14 @@ const std::map IMAGE_FORMAT_ { OHOS::Media::PixelFormat::YCBCR_P010, OHOS::GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_P010 }, { OHOS::Media::PixelFormat::YCRCB_P010, OHOS::GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_P010 }, }; + typedef enum { NONE = 0, BASE = 1, GAINMAP = 2, ALTERNATE = 3, } ImagePixelmapHdrMetadataType; + const std::map HDR_METADATA_TYPE_MAP = { { NONE, OHOS::HDI::Display::Graphic::Common::V1_0::CM_METADATA_NONE }, @@ -61,6 +64,7 @@ const std::map COLORSPACE_MAP = { { SRGB, OHOS::HDI::Display::Graphic::Common::V1_0::CM_SRGB_FULL }, { SRGB_LIMIT, OHOS::HDI::Display::Graphic::Common::V1_0::CM_SRGB_LIMIT }, @@ -89,47 +94,22 @@ const std::map GetOpenGLContext(); + ClContext* GetClContext(); void LoadLibrary(); void UnloadLibrary(); bool CheckColorSpaceConversionSupport( @@ -142,23 +122,27 @@ private: const ImageProcessing_ColorSpaceInfo* destinationImageInfo, const ImageProcessing_ColorSpaceInfo* destinationGainmapInfo); bool CheckMetadataGenerationSupport(const ImageProcessing_ColorSpaceInfo* sourceImageInfo); - void* mLibHandle{}; + +private: using LibFunction = bool (*)(const OHOS::Media::VideoProcessingEngine::ColorSpaceInfo inputInfo, const OHOS::Media::VideoProcessingEngine::ColorSpaceInfo outputInfo); using LibMetaFunction = bool (*)(const OHOS::Media::VideoProcessingEngine::ColorSpaceInfo inputInfo); + + ImageProcessing_ErrorCode LoadAlgo(); + + std::shared_ptr openglContext_{nullptr}; + ClContext *openclContext_{nullptr}; + std::mutex lock_; + int32_t usedInstance_ {0}; + void* mLibHandle{}; + LibFunction isDecompositionSupported_{nullptr}; LibFunction isColorSpaceConversionSupported_{nullptr}; LibFunction isCompositionSupported_{nullptr}; - LibFunction isDecompositionSupported_{nullptr}; LibMetaFunction isMetadataGenSupported_{nullptr}; - std::mutex lock_; - int32_t usedInstance_ { 0 }; - ClContext *openclContext_ {nullptr}; - ImageProcessing_ErrorCode OpenCLInit(); - std::shared_ptr openglContext_ {nullptr}; - ImageProcessing_ErrorCode OpenGLInit(); }; -extern "C" IImageProcessingNdk* CreateImageProcessingNdk(); -extern "C" void DestroyImageProcessingNdk(IImageProcessingNdk* obj); +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS -#endif // IMAGE_PROCESSING_CAPI_IMPL_H +#endif \ No newline at end of file diff --git a/framework/capi/image_processing/metadata_generator/metadata_generator_image_native.cpp 
b/framework/capi/image_processing/metadata_generator/metadata_generator_image_native.cpp index 49722e270c43c9db9c4d0780075c11de0d5bc6b3..01f019b99b71d879970275a6c7f0204b8ca1d926 100644 --- a/framework/capi/image_processing/metadata_generator/metadata_generator_image_native.cpp +++ b/framework/capi/image_processing/metadata_generator/metadata_generator_image_native.cpp @@ -17,7 +17,7 @@ #include -#include "image_processing_capi_impl.h" +#include "image_processing_capi_capability.h" #include "detail_enhancer_common.h" #include "detail_enhancer_image_fwk.h" #include "image_processing_utils.h" diff --git a/framework/capi/video_processing/include/video_processing_capi_capability.h b/framework/capi/video_processing/include/video_processing_capi_capability.h index a658327a0ee72976b6a129f302277822e180d23f..0b1966aceb82698eb7d4d0afbbeeceeaacd00b2e 100644 --- a/framework/capi/video_processing/include/video_processing_capi_capability.h +++ b/framework/capi/video_processing/include/video_processing_capi_capability.h @@ -23,6 +23,7 @@ #include "surface_type.h" #include "video_processing_types.h" +#include "algorithm_common.h" namespace OHOS { namespace Media { @@ -60,6 +61,10 @@ public: static bool IsColorSpaceConversionSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo, const VideoProcessing_ColorSpaceInfo* destinationVideoInfo); static bool IsMetadataGenerationSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo); + static VideoProcessing_ErrorCode OpenGLInit(); + static std::shared_ptr GetOpenGLContext(); +private: + static std::shared_ptr openglContext_; }; } // namespace VideoProcessingEngine } // namespace Media diff --git a/framework/capi/video_processing/include/video_processing_capi_impl.h b/framework/capi/video_processing/include/video_processing_capi_impl.h deleted file mode 100644 index 45014f1ff184d6221c4867150e744be81baa6dec..0000000000000000000000000000000000000000 --- a/framework/capi/video_processing/include/video_processing_capi_impl.h +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2024 Huawei Device Co., Ltd. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef VIDEO_PROCESSING_CAPI_IMPL_H -#define VIDEO_PROCESSING_CAPI_IMPL_H - -#include "video_processing_capi_interface.h" -#include "algorithm_common.h" - -class VideoProcessingCapiImpl : public IVideoProcessingNdk { -public: - VideoProcessingCapiImpl() = default; - virtual ~VideoProcessingCapiImpl() = default; - VideoProcessingCapiImpl(const VideoProcessingCapiImpl&) = delete; - VideoProcessingCapiImpl& operator=(const VideoProcessingCapiImpl&) = delete; - VideoProcessingCapiImpl(VideoProcessingCapiImpl&&) = delete; - VideoProcessingCapiImpl& operator=(VideoProcessingCapiImpl&&) = delete; - - VideoProcessing_ErrorCode InitializeEnvironment() final; - VideoProcessing_ErrorCode DeinitializeEnvironment() final; - bool IsColorSpaceConversionSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo, - const VideoProcessing_ColorSpaceInfo* destinationVideoInfo) final; - bool IsMetadataGenerationSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo) final; - VideoProcessing_ErrorCode Create(OH_VideoProcessing** videoProcessor, int type) final; - VideoProcessing_ErrorCode Destroy(OH_VideoProcessing* videoProcessor) final; - VideoProcessing_ErrorCode RegisterCallback(OH_VideoProcessing* videoProcessor, - const VideoProcessing_Callback* callback, void* userData) final; - VideoProcessing_ErrorCode SetSurface(OH_VideoProcessing* videoProcessor, const OHNativeWindow* window) final; - VideoProcessing_ErrorCode GetSurface(OH_VideoProcessing* videoProcessor, OHNativeWindow** window) final; - VideoProcessing_ErrorCode SetParameter(OH_VideoProcessing* videoProcessor, - const OH_AVFormat* parameter) final; - VideoProcessing_ErrorCode GetParameter(OH_VideoProcessing* videoProcessor, OH_AVFormat* parameter) final; - VideoProcessing_ErrorCode Start(OH_VideoProcessing* videoProcessor) final; - VideoProcessing_ErrorCode Stop(OH_VideoProcessing* videoProcessor) final; - VideoProcessing_ErrorCode RenderOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index) final; - - VideoProcessing_ErrorCode Create(VideoProcessing_Callback** callback) final; - VideoProcessing_ErrorCode Destroy(VideoProcessing_Callback* callback) final; - VideoProcessing_ErrorCode BindOnError(VideoProcessing_Callback* callback, - OH_VideoProcessingCallback_OnError onError) final; - VideoProcessing_ErrorCode BindOnState(VideoProcessing_Callback* callback, - OH_VideoProcessingCallback_OnState onState) final; - VideoProcessing_ErrorCode BindOnNewOutputBuffer(VideoProcessing_Callback* callback, - OH_VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer) final; -private: - std::shared_ptr openglContext_ {nullptr}; - VideoProcessing_ErrorCode OpenGLInit(); -}; - -#endif // VIDEO_PROCESSING_CAPI_IMPL_H diff --git a/framework/capi/video_processing/video_processing_capi_impl.cpp b/framework/capi/video_processing/video_processing.cpp similarity index 62% rename from framework/capi/video_processing/video_processing_capi_impl.cpp rename to framework/capi/video_processing/video_processing.cpp index 8d5be624f3b9a3908bd083194facba1863fa2f0e..e108301c86d18ce55eff560cd24280eef9368fcd 100644 --- a/framework/capi/video_processing/video_processing_capi_impl.cpp +++ b/framework/capi/video_processing/video_processing.cpp @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,18 +13,26 @@ * limitations under the License. */ -#include "video_processing_capi_impl.h" -#include "video_processing_capi_capability.h" +#include "video_processing.h" + +#include +#include + #include "vpe_log.h" +#include "video_processing_capi_capability.h" #include "video_environment_native.h" #include "video_processing_callback_impl.h" #include "video_processing_impl.h" using namespace OHOS::Media::VideoProcessingEngine; +// NDK define +// Video processing feature types: const int32_t VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION = 0x1; const int32_t VIDEO_PROCESSING_TYPE_METADATA_GENERATION = 0x2; const int32_t VIDEO_PROCESSING_TYPE_DETAIL_ENHANCER = 0x4; +// Video processing parameter keys: +// Detail enhancement: const char* VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL = "QualityLevel"; namespace { @@ -39,6 +47,7 @@ VideoProcessing_ErrorCode CallVideoProcessing(OH_VideoProcessing* videoProcessor "videoProcessor is invalid!"); return operation(videoProcessing); } + // Call video processing callback interface VideoProcessing_ErrorCode CallVideoProcessingCallback(VideoProcessing_Callback* callback, std::function&)>&& operation) @@ -51,51 +60,40 @@ VideoProcessing_ErrorCode CallVideoProcessingCallback(VideoProcessing_Callback* } } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::OpenGLInit() -{ - auto status = SetupOpengl(openglContext_); - CHECK_AND_RETURN_RET_LOG(status == static_cast(VIDEO_PROCESSING_SUCCESS), - VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, - "OpenGLInit SetupOpengl fail!"); - return VIDEO_PROCESSING_SUCCESS; -} - -VideoProcessing_ErrorCode VideoProcessingCapiImpl::InitializeEnvironment() +VideoProcessing_ErrorCode OH_VideoProcessing_InitializeEnvironment(void) { - CHECK_AND_RETURN_RET_LOG(OpenGLInit() == VIDEO_PROCESSING_SUCCESS, VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, - "OpenGLInit failed!"); + CHECK_AND_RETURN_RET_LOG(VideoProcessingCapiCapability::OpenGLInit() == VIDEO_PROCESSING_SUCCESS, + VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, "OpenGLInit failed!"); return VideoEnvironmentNative::Get().Initialize(); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::DeinitializeEnvironment() +VideoProcessing_ErrorCode OH_VideoProcessing_DeinitializeEnvironment(void) { return VideoEnvironmentNative::Get().Deinitialize(); } -bool VideoProcessingCapiImpl::IsColorSpaceConversionSupported( - const VideoProcessing_ColorSpaceInfo* sourceVideoInfo, +bool OH_VideoProcessing_IsColorSpaceConversionSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo, const VideoProcessing_ColorSpaceInfo* destinationVideoInfo) { return VideoProcessingCapiCapability::IsColorSpaceConversionSupported(sourceVideoInfo, destinationVideoInfo); } -bool VideoProcessingCapiImpl::IsMetadataGenerationSupported( - const VideoProcessing_ColorSpaceInfo* sourceVideoInfo) +bool OH_VideoProcessing_IsMetadataGenerationSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo) { return VideoProcessingCapiCapability::IsMetadataGenerationSupported(sourceVideoInfo); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::Create(OH_VideoProcessing** videoProcessor, int type) +VideoProcessing_ErrorCode OH_VideoProcessing_Create(OH_VideoProcessing** videoProcessor, int type) { - return 
OH_VideoProcessing::Create(videoProcessor, type, openglContext_); + return OH_VideoProcessing::Create(videoProcessor, type, VideoProcessingCapiCapability::GetOpenGLContext()); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::Destroy(OH_VideoProcessing* videoProcessor) +VideoProcessing_ErrorCode OH_VideoProcessing_Destroy(OH_VideoProcessing* videoProcessor) { return OH_VideoProcessing::Destroy(videoProcessor); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::RegisterCallback(OH_VideoProcessing* videoProcessor, +VideoProcessing_ErrorCode OH_VideoProcessing_RegisterCallback(OH_VideoProcessing* videoProcessor, const VideoProcessing_Callback* callback, void* userData) { return CallVideoProcessing(videoProcessor, [&callback, &userData](std::shared_ptr& obj) { @@ -103,7 +101,7 @@ VideoProcessing_ErrorCode VideoProcessingCapiImpl::RegisterCallback(OH_VideoProc }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::SetSurface(OH_VideoProcessing* videoProcessor, +VideoProcessing_ErrorCode OH_VideoProcessing_SetSurface(OH_VideoProcessing* videoProcessor, const OHNativeWindow* window) { return CallVideoProcessing(videoProcessor, [&window](std::shared_ptr& obj) { @@ -111,15 +109,14 @@ VideoProcessing_ErrorCode VideoProcessingCapiImpl::SetSurface(OH_VideoProcessing }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::GetSurface(OH_VideoProcessing* videoProcessor, - OHNativeWindow** window) +VideoProcessing_ErrorCode OH_VideoProcessing_GetSurface(OH_VideoProcessing* videoProcessor, OHNativeWindow** window) { return CallVideoProcessing(videoProcessor, [&window](std::shared_ptr& obj) { return obj->GetSurface(window); }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::SetParameter(OH_VideoProcessing* videoProcessor, +VideoProcessing_ErrorCode OH_VideoProcessing_SetParameter(OH_VideoProcessing* videoProcessor, const OH_AVFormat* parameter) { return CallVideoProcessing(videoProcessor, [¶meter](std::shared_ptr& obj) { @@ -127,47 +124,45 @@ VideoProcessing_ErrorCode VideoProcessingCapiImpl::SetParameter(OH_VideoProcessi }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::GetParameter(OH_VideoProcessing* videoProcessor, - OH_AVFormat* parameter) +VideoProcessing_ErrorCode OH_VideoProcessing_GetParameter(OH_VideoProcessing* videoProcessor, OH_AVFormat* parameter) { return CallVideoProcessing(videoProcessor, [¶meter](std::shared_ptr& obj) { return obj->GetParameter(parameter); }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::Start(OH_VideoProcessing* videoProcessor) +VideoProcessing_ErrorCode OH_VideoProcessing_Start(OH_VideoProcessing* videoProcessor) { return CallVideoProcessing(videoProcessor, [](std::shared_ptr& obj) { return obj->Start(); }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::Stop(OH_VideoProcessing* videoProcessor) +VideoProcessing_ErrorCode OH_VideoProcessing_Stop(OH_VideoProcessing* videoProcessor) { return CallVideoProcessing(videoProcessor, [](std::shared_ptr& obj) { return obj->Stop(); }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::RenderOutputBuffer(OH_VideoProcessing* videoProcessor, - uint32_t index) +VideoProcessing_ErrorCode OH_VideoProcessing_RenderOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index) { return CallVideoProcessing(videoProcessor, [&index](std::shared_ptr& obj) { return obj->RenderOutputBuffer(index); }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::Create(VideoProcessing_Callback** callback) +VideoProcessing_ErrorCode OH_VideoProcessingCallback_Create(VideoProcessing_Callback** callback) { 
return VideoProcessing_Callback::Create(callback); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::Destroy(VideoProcessing_Callback* callback) +VideoProcessing_ErrorCode OH_VideoProcessingCallback_Destroy(VideoProcessing_Callback* callback) { return VideoProcessing_Callback::Destroy(callback); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::BindOnError(VideoProcessing_Callback* callback, +VideoProcessing_ErrorCode OH_VideoProcessingCallback_BindOnError(VideoProcessing_Callback* callback, OH_VideoProcessingCallback_OnError onError) { return CallVideoProcessingCallback(callback, [&onError](std::shared_ptr& obj) { @@ -175,7 +170,7 @@ VideoProcessing_ErrorCode VideoProcessingCapiImpl::BindOnError(VideoProcessing_C }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::BindOnState(VideoProcessing_Callback* callback, +VideoProcessing_ErrorCode OH_VideoProcessingCallback_BindOnState(VideoProcessing_Callback* callback, OH_VideoProcessingCallback_OnState onState) { return CallVideoProcessingCallback(callback, [&onState](std::shared_ptr& obj) { @@ -183,7 +178,7 @@ VideoProcessing_ErrorCode VideoProcessingCapiImpl::BindOnState(VideoProcessing_C }); } -VideoProcessing_ErrorCode VideoProcessingCapiImpl::BindOnNewOutputBuffer(VideoProcessing_Callback* callback, +VideoProcessing_ErrorCode OH_VideoProcessingCallback_BindOnNewOutputBuffer(VideoProcessing_Callback* callback, OH_VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer) { return CallVideoProcessingCallback(callback, @@ -191,15 +186,3 @@ VideoProcessing_ErrorCode VideoProcessingCapiImpl::BindOnNewOutputBuffer(VideoPr return obj->BindOnNewOutputBuffer(onNewOutputBuffer); }); } - -IVideoProcessingNdk* CreateVideoProcessingNdk() -{ - return new(std::nothrow) VideoProcessingCapiImpl(); -} - -void DestroyVideoProcessingNdk(IVideoProcessingNdk* obj) -{ - CHECK_AND_RETURN_LOG(obj != nullptr, "VPE video processing is null!"); - VideoProcessingCapiImpl* impl = static_cast(obj); - delete impl; -} diff --git a/framework/capi/video_processing/video_processing_capi_capability.cpp b/framework/capi/video_processing/video_processing_capi_capability.cpp index f61649895c1c5ce8de286cf244e4044066b13dfe..91c3350a02f002acd56fa2842d5892bb4832485b 100644 --- a/framework/capi/video_processing/video_processing_capi_capability.cpp +++ b/framework/capi/video_processing/video_processing_capi_capability.cpp @@ -359,3 +359,19 @@ bool VideoProcessingCapiCapability::IsMetadataGenerationSupported( sourceVideoInfo->pixelFormat, sourceVideoInfo->colorSpace, sourceVideoInfo->metadataType); return false; } + +std::shared_ptr VideoProcessingCapiCapability::openglContext_ = nullptr; + +VideoProcessing_ErrorCode VideoProcessingCapiCapability::OpenGLInit() +{ + auto status = SetupOpengl(openglContext_); + CHECK_AND_RETURN_RET_LOG(status == static_cast(VIDEO_PROCESSING_SUCCESS), + VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + "OpenGLInit SetupOpengl fail!"); + return VIDEO_PROCESSING_SUCCESS; +} + +std::shared_ptr VideoProcessingCapiCapability::GetOpenGLContext() +{ + return openglContext_; +} diff --git a/interfaces/kits/c/image_processing.h b/interfaces/kits/c/image_processing.h new file mode 100644 index 0000000000000000000000000000000000000000..1ea35f9fc44073f38fc587bbb98c322ba3397c79 --- /dev/null +++ b/interfaces/kits/c/image_processing.h @@ -0,0 +1,314 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * @addtogroup ImageProcessing + * @{ + * + * @brief Provide APIs for image quality processing. + * + * @since 13 + */ + +/** + * @file image_processing.h + * + * @brief Declare image processing functions. + * + * Provides SDR content processing for images, including color space conversion, metadata generation + * and image scaling. + * + * @library libimage_processing.so + * @syscap SystemCapability.Multimedia.VideoProcessingEngine + * @kit ImageKit + * @since 13 + */ + +#ifndef VIDEO_PROCESSING_ENGINE_C_API_IMAGE_PROCESSING_H +#define VIDEO_PROCESSING_ENGINE_C_API_IMAGE_PROCESSING_H + +#include +#include +#include "image_processing_types.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @brief Initialize global environment for image processing. + * + * This function is optional. \n + * Typically, this function is called once when the host process is started to initialize the global environment for + * image processing, which can reduce the time of {@link OH_ImageProcessing_Create}. \n + * To deinitialize global environment, call {@link OH_ImageProcessing_DeinitializeEnvironment}. + * + * @return {@link IMAGE_PROCESSING_SUCCESS} if initialization is successful. \n + * {@link IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED} if initialization is failed. \n + * You can check if the device GPU is working properly. + * @since 13 + */ +ImageProcessing_ErrorCode OH_ImageProcessing_InitializeEnvironment(void); + +/** + * @brief Deinitialize global environment for image processing. + * + * This function is required if {@link OH_ImageProcessing_InitializeEnvironment} is called. Typically, this + * function is called when the host process is about to exit to deinitialize the global environment, which is + * initialized by calling {@link OH_ImageProcessing_InitializeEnvironment}. \n + * If there is some image processing instance existing, this function should not be called. \n + * If the {@link OH_ImageProcessing_InitializeEnvironment} is not called, this function should not be called. + * + * @return {@link IMAGE_PROCESSING_SUCCESS} if deinitialization is successful. \n + * {@link IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED} if some image processing instance is not destroyed or + * {@link OH_ImageProcessing_InitializeEnvironment} is not called. \n + * @since 13 + */ +ImageProcessing_ErrorCode OH_ImageProcessing_DeinitializeEnvironment(void); + +/** + * @brief Query whether the image color space conversion is supported. + * + * @param sourceImageInfo Input image color space information pointer. + * @param destinationImageInfo Output image color space information pointer. + * @return true if the color space conversion is supported. \n + * false if the the color space conversion is unsupported. + * @since 13 + */ +bool OH_ImageProcessing_IsColorSpaceConversionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo); + +/** + * @brief Query whether the image composition is supported. + * + * @param sourceImageInfo Input image color space information pointer. 
+ * @param sourceGainmapInfo Input gainmap color space information pointer. + * @param destinationImageInfo Output image color space information pointer. + * @return true if the image composition is supported. \n + * false if the image composition is unsupported. + * @since 13 + */ +bool OH_ImageProcessing_IsCompositionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* sourceGainmapInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo); + +/** + * @brief Query whether the image decomposition is supported. + * + * @param sourceImageInfo Input image color space information pointer. + * @param destinationImageInfo Output image color space information pointer. + * @param destinationGainmapInfo Output gainmap information pointer. + * @return true if the image decomposition is supported. \n + * false if the image decomposition is unsupported. + * @since 13 + */ +bool OH_ImageProcessing_IsDecompositionSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationImageInfo, + const ImageProcessing_ColorSpaceInfo* destinationGainmapInfo); + +/** + * @brief Query whether the image metadata generation is supported. + * + * @param sourceImageInfo Input image color space information pointer. + * @return true if the image metadata generation is supported.. \n + * false if the image metadata generation is unsupported. + * @since 13 + */ +bool OH_ImageProcessing_IsMetadataGenerationSupported( + const ImageProcessing_ColorSpaceInfo* sourceImageInfo); + +/** + * @brief Create an image processing instance. + * + * @param imageProcessor Output parameter. The *imageProcessor points to a new image processing object. + * The *imageProcessor must be null before passed in. + * @param type Use IMAGE_PROCESSING_TYPE_XXX to specify the processing type. The processing type of the instance can not + * be changed. + * @return {@link IMAGE_PROCESSING_SUCCESS} if creating an image processing successfully. \n + * {@link IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING} if the type is not supported. For example, if metadata + * generation is not supported by vendor, it returns unsupported processing. \n + * {@link IMAGE_PROCESSING_ERROR_CREATE_FAILED} if failed to create an image processing. \n + * {@link IMAGE_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or *instance is not null. \n + * {@link IMAGE_PROCESSING_ERROR_INVALID_PARAMETER} if type is invalid. \n + * @since 13 + */ +ImageProcessing_ErrorCode OH_ImageProcessing_Create(OH_ImageProcessing** imageProcessor, int32_t type); + +/** + * @brief Destroy the image processing instance. + * + * @param imageProcessor An image processing instance pointer. It is recommended setting the + * instance pointer to null after the instance is destroyed. + * @return {@link IMAGE_PROCESSING_SUCCESS} if the instance is destroyed successfully. \n + * {@link IMAGE_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an image processing instance. + * @since 13 + */ +ImageProcessing_ErrorCode OH_ImageProcessing_Destroy(OH_ImageProcessing* imageProcessor); + +/** + * @brief Set parameter for image processing. + * + * Add parameter identified by the specified parameter key. + * + * @param imageProcessor An image processing instance pointer. + * @param parameter The parameter for image processing. + * @return {@link IMAGE_PROCESSING_SUCCESS} if setting parameter is successful. 
\n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an image processing instance. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_PARAMETER} if the parameter is null. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_VALUE} if some property of the parameter is invalid. For example, the parameter
+ * contains unsupported parameter key or value. \n
+ * {@link IMAGE_PROCESSING_ERROR_NO_MEMORY} if memory allocation failed.
+ * @since 13
+ */
+ImageProcessing_ErrorCode OH_ImageProcessing_SetParameter(OH_ImageProcessing* imageProcessor,
+    const OH_AVFormat* parameter);
+
+/**
+ * @brief Get parameter of image processing.
+ *
+ * Get parameter identified by the specified parameter key.
+ *
+ * @param imageProcessor An image processing instance pointer.
+ * @param parameter The parameter used by the image processing instance.
+ * @return {@link IMAGE_PROCESSING_SUCCESS} if getting parameter is successful. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an image processing instance. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_PARAMETER} if the parameter is null. \n
+ * @since 13
+ */
+ImageProcessing_ErrorCode OH_ImageProcessing_GetParameter(OH_ImageProcessing* imageProcessor,
+    OH_AVFormat* parameter);
+
+/**
+ * @brief Conversion between single-layer images.
+ *
+ * The function generates the destinationImage from the sourceImage. It includes the color space conversion from
+ * HDR image to SDR image, SDR image to HDR image, SDR image to SDR image and HDR image to HDR image.
+ *
+ * @param imageProcessor An image processing instance pointer. The instance should be created with
+ * type {@link IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION}.
+ * @param sourceImage Input image pointer.
+ * @param destinationImage Output image pointer.
+ * @return {@link IMAGE_PROCESSING_SUCCESS} if processing image is successful. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an image processing instance. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_PARAMETER} if the image is null. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_VALUE} if some property of image is invalid. For example, the color space
+ * of the image is unsupported. \n
+ * {@link IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING} if the processing is not supported. \n
+ * {@link IMAGE_PROCESSING_ERROR_PROCESS_FAILED} if processing error occurs. \n
+ * {@link IMAGE_PROCESSING_ERROR_NO_MEMORY} if memory allocation failed.
+ * @since 13
+ */
+ImageProcessing_ErrorCode OH_ImageProcessing_ConvertColorSpace(OH_ImageProcessing* imageProcessor,
+    OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage);
+
+/**
+ * @brief Composition from dual-layer HDR images to single-layer HDR images.
+ *
+ * The function generates the destinationImage from the sourceImage and sourceGainmap.
+ *
+ * @param imageProcessor An image processing instance pointer. The instance should be created with
+ * type {@link IMAGE_PROCESSING_TYPE_COMPOSITION}.
+ * @param sourceImage Input image pointer.
+ * @param sourceGainmap Input gainmap pointer.
+ * @param destinationImage Output image pointer.
+ * @return {@link IMAGE_PROCESSING_SUCCESS} if processing image is successful. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an image processing instance. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_PARAMETER} if the image is null. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_VALUE} if some property of image is invalid. For example, the color space
+ * of the image is unsupported. \n
+ * {@link IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING} if the processing is not supported. \n
+ * {@link IMAGE_PROCESSING_ERROR_PROCESS_FAILED} if processing error occurs. \n
+ * {@link IMAGE_PROCESSING_ERROR_NO_MEMORY} if memory allocation failed.
+ * @since 13
+ */
+ImageProcessing_ErrorCode OH_ImageProcessing_Compose(OH_ImageProcessing* imageProcessor,
+    OH_PixelmapNative* sourceImage, OH_PixelmapNative* sourceGainmap, OH_PixelmapNative* destinationImage);
+
+/**
+ * @brief Decomposition from single-layer HDR images to dual-layer HDR images.
+ *
+ * The function generates the destinationImage and destinationGainmap from the sourceImage.
+ *
+ * @param imageProcessor An image processing instance pointer. The instance should be created with
+ * type {@link IMAGE_PROCESSING_TYPE_DECOMPOSITION}.
+ * @param sourceImage Input image pointer.
+ * @param destinationImage Output image pointer.
+ * @param destinationGainmap Output gainmap pointer.
+ * @return {@link IMAGE_PROCESSING_SUCCESS} if processing image is successful. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an image processing instance. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_PARAMETER} if the image is null. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_VALUE} if some property of image is invalid. For example, the color space
+ * of the image is unsupported. \n
+ * {@link IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING} if the processing is not supported. \n
+ * {@link IMAGE_PROCESSING_ERROR_PROCESS_FAILED} if processing error occurs. \n
+ * {@link IMAGE_PROCESSING_ERROR_NO_MEMORY} if memory allocation failed.
+ * @since 13
+ */
+ImageProcessing_ErrorCode OH_ImageProcessing_Decompose(OH_ImageProcessing* imageProcessor,
+    OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage, OH_PixelmapNative* destinationGainmap);
+
+/**
+ * @brief Metadata generation for HDR images.
+ *
+ * The function generates metadata for the sourceImage.
+ *
+ * @param imageProcessor An image processing instance pointer. The instance should be created with
+ * type {@link IMAGE_PROCESSING_TYPE_METADATA_GENERATION}.
+ * @param sourceImage Input image pointer.
+ * @return {@link IMAGE_PROCESSING_SUCCESS} if processing image is successful. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an image processing instance. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_PARAMETER} if the image is null. \n
+ * {@link IMAGE_PROCESSING_ERROR_INVALID_VALUE} if some property of image is invalid. For example, the color space
+ * of the image is unsupported. \n
+ * {@link IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING} if the processing is not supported. \n
+ * {@link IMAGE_PROCESSING_ERROR_PROCESS_FAILED} if processing error occurs. \n
+ * {@link IMAGE_PROCESSING_ERROR_NO_MEMORY} if memory allocation failed.
+ * @since 13
+ */
+ImageProcessing_ErrorCode OH_ImageProcessing_GenerateMetadata(OH_ImageProcessing* imageProcessor,
+    OH_PixelmapNative* sourceImage);
+
+/**
+ * @brief Clarity enhancement for images.
+ *
+ * The function generates the destinationImage from the sourceImage with the necessary scaling operation according to
+ * the size preset in the sourceImage and destinationImage. Different levels of scaling methods are provided to balance
+ * performance and image quality.
+ *
+ * @param imageProcessor An image processing instance pointer. The instance should be created with
+ * type {@link IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER}.
+ * @param sourceImage Input image pointer. + * @param destinationImage Output image pointer. + * @return {@link IMAGE_PROCESSING_SUCCESS} if processing image is successful. \n + * {@link IMAGE_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an image processing instance. \n + * {@link IMAGE_PROCESSING_ERROR_INVALID_PARAMETER} if the image is null. \n + * {@link IMAGE_PROCESSING_ERROR_INVALID_VALUE} if some property of image is invalid. For example, the color space + * of the image is unsupported. \n + * {@link IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING} if the processing is not supported. \n + * {@link IMAGE_PROCESSING_ERROR_PROCESS_FAILED} if processing error occurs. \n + * {@link IMAGE_PROCESSING_ERROR_NO_MEMORY} if memory allocation failed. + * @since 13 + */ +ImageProcessing_ErrorCode OH_ImageProcessing_EnhanceDetail(OH_ImageProcessing* imageProcessor, + OH_PixelmapNative* sourceImage, OH_PixelmapNative* destinationImage); +#ifdef __cplusplus +} +#endif + +#endif // VIDEO_PROCESSING_ENGINE_C_API_IMAGE_PROCESSING_H +/** @} */ diff --git a/framework/capi/config.gni b/interfaces/kits/c/image_processing/BUILD.gn similarity index 41% rename from framework/capi/config.gni rename to interfaces/kits/c/image_processing/BUILD.gn index 894fd572a4a361cee368f3017e4d391bf79f69cc..ff1239898dce28180a6245c6f34042483a892a21 100644 --- a/framework/capi/config.gni +++ b/interfaces/kits/c/image_processing/BUILD.gn @@ -3,7 +3,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -11,6 +11,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
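As a usage note for the image_processing.h interface introduced above, the following is a minimal sketch of driving the detail enhancer from C. It is illustrative only: the helper name EnhanceImageExample is hypothetical, the include paths assume the NDK header install directory configured later in this patch, the two OH_PixelmapNative objects are assumed to be created beforehand through the Image Kit pixelmap API, and most error handling is omitted.

#include <multimedia/player_framework/native_avformat.h>            // OH_AVFormat helpers (path assumed)
#include <multimedia/video_processing_engine/image_processing.h>    // path assumed from the NDK install dir

// Hypothetical helper: scale srcImage into dstImage with the detail enhancer.
// srcImage/dstImage are assumed to be created elsewhere via the Image Kit pixelmap C API;
// the output size preset in dstImage implies the scaling ratio.
static ImageProcessing_ErrorCode EnhanceImageExample(OH_PixelmapNative* srcImage, OH_PixelmapNative* dstImage)
{
    OH_ImageProcessing_InitializeEnvironment();    // optional, shortens OH_ImageProcessing_Create()

    OH_ImageProcessing* processor = NULL;
    ImageProcessing_ErrorCode ret =
        OH_ImageProcessing_Create(&processor, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER);
    if (ret != IMAGE_PROCESSING_SUCCESS) {
        return ret;
    }

    // Pick the quality/speed trade-off through the documented parameter key.
    OH_AVFormat* parameter = OH_AVFormat_Create();
    OH_AVFormat_SetIntValue(parameter, IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL,
        IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH);
    OH_ImageProcessing_SetParameter(processor, parameter);
    OH_AVFormat_Destroy(parameter);

    ret = OH_ImageProcessing_EnhanceDetail(processor, srcImage, dstImage);

    OH_ImageProcessing_Destroy(processor);
    OH_ImageProcessing_DeinitializeEnvironment();  // only if no other instance is still alive
    return ret;
}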
-import("//foundation/multimedia/video_processing_engine/config.gni") +import("//build/ohos.gni") +import("//build/ohos/ndk/ndk.gni") -VIDEO_PROCESSING_ENGINE_CAPI_DIR = "//foundation/multimedia/media_foundation/video_processing_engine" +ohos_ndk_headers("image_processing_ndk_headers") { + dest_dir = "$ndk_headers_out_dir/multimedia/video_processing_engine" + sources = [ + "../image_processing.h", + "../image_processing_types.h", + ] +} + +ohos_ndk_library("libimage_processing_ndk") { + ndk_description_file = "./libimage_processing.ndk.json" + output_name = "image_processing" + output_extension = "so" + min_compact_version = "13" + system_capability = "SystemCapability.Multimedia.VideoProcessingEngine" + system_capability_headers = [ + "multimedia/video_processing_engine/image_processing_types.h", + "multimedia/video_processing_engine/image_processing.h", + ] +} diff --git a/interfaces/kits/c/image_processing/libimage_processing.ndk.json b/interfaces/kits/c/image_processing/libimage_processing.ndk.json new file mode 100644 index 0000000000000000000000000000000000000000..918e744ee31dd9e7ed26bfb6aa6c125780860029 --- /dev/null +++ b/interfaces/kits/c/image_processing/libimage_processing.ndk.json @@ -0,0 +1,92 @@ +[ + { + "first_introduced": "13", + "name": "OH_ImageProcessing_InitializeEnvironment" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_DeinitializeEnvironment" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_IsColorSpaceConversionSupported" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_IsCompositionSupported" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_IsDecompositionSupported" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_IsMetadataGenerationSupported" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_Create" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_Destroy" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_SetParameter" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_GetParameter" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_ConvertColorSpace" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_Compose" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_Decompose" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_GenerateMetadata" + }, + { + "first_introduced": "13", + "name": "OH_ImageProcessing_EnhanceDetail" + }, + { + "first_introduced": "13", + "name": "IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION", + "type": "variable" + }, + { + "first_introduced": "13", + "name": "IMAGE_PROCESSING_TYPE_COMPOSITION", + "type": "variable" + }, + { + "first_introduced": "13", + "name": "IMAGE_PROCESSING_TYPE_DECOMPOSITION", + "type": "variable" + }, + { + "first_introduced": "13", + "name": "IMAGE_PROCESSING_TYPE_METADATA_GENERATION", + "type": "variable" + }, + { + "first_introduced": "13", + "name": "IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER", + "type": "variable" + }, + { + "first_introduced": "13", + "name": "IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL", + "type": "variable" + } +] diff --git a/interfaces/kits/c/image_processing_types.h b/interfaces/kits/c/image_processing_types.h new file mode 100644 index 0000000000000000000000000000000000000000..d42f565869b5582e6c6773fc4812266799f9033a --- /dev/null +++ b/interfaces/kits/c/image_processing_types.h @@ -0,0 +1,229 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * @addtogroup ImageProcessing + * @{ + * + * @brief Provide image processing including color space conversion and metadata generation. + * + * @since 13 + */ + +/** + * @file image_processing_types.h + * + * @brief Type definitions for image processing. + * + * @library libimage_processing.so + * @syscap SystemCapability.Multimedia.VideoProcessingEngine + * @kit ImageKit + * @since 13 + */ + +#ifndef VIDEO_PROCESSING_ENGINE_C_API_IMAGE_PROCESSING_TYPES_H +#define VIDEO_PROCESSING_ENGINE_C_API_IMAGE_PROCESSING_TYPES_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @brief Define the object for image processing. + * + * Define a null pointer of OH_ImageProcessing and call {@link OH_ImageProcessing_Create} to create an image processing + * instance. The pointer should be null before creating instance. + * User can create multiple image processing instances for different processing types. + * + * @since 13 + */ +typedef struct OH_ImageProcessing OH_ImageProcessing; + +/** + * @brief Forward declaration of OH_PixelmapNative. + * + * @since 13 + */ +typedef struct OH_PixelmapNative OH_PixelmapNative; + +/** + * @brief Forward declaration of OH_AVFormat. + * + * @since 13 + */ +typedef struct OH_AVFormat OH_AVFormat; + +/** + * @brief Used to create an image processing instance for color space conversion. + * + * Color space conversion includes the conversion of single-layer HDR images to SDR images, as well as + * the color space conversion of SDR images, and the conversion of SDR images to single-layer HDR images. Some + * capabilities are supported by vendor. Use {@link OH_ImageProcessing_IsColorSpaceConversionSupported} to query if + * the conversion is supported between single-layer images. + * + * @see OH_ImageProcessing_Create + * @since 13 + */ +extern const int32_t IMAGE_PROCESSING_TYPE_COLOR_SPACE_CONVERSION; + +/** + * @brief Used to create an image processing instance for HDR image composition. + * + * HDR image compose includes the conversion from dual-layer HDR images to single-layer HDR images. Some + * capabilities are supported by vendor. Use {@link OH_ImageProcessing_IsCompositionSupported} to + * query if the composition is supported from dual-layer HDR image to single-layer HDR image. + * + * @see OH_ImageProcessing_Create + * @since 13 + */ +extern const int32_t IMAGE_PROCESSING_TYPE_COMPOSITION; + +/** + * @brief Used to create an image processing instance for HDR image decomposition. + * + * HDR image decompose includes the conversion from single-layer HDR images to dual-layer HDR images. Some + * capabilities are supported by vendor. Use {@link OH_ImageProcessing_IsDecompositionSupported} to + * query if the decomposition is supported from single-layer image to dual-layer HDR image. + * + * @see OH_ImageProcessing_Create + * @since 13 + */ +extern const int32_t IMAGE_PROCESSING_TYPE_DECOMPOSITION; + +/** + * @brief Used to create an image processing instance for metadata generation. 
+ * + * Generate HDR Vivid metadata for single-layer image. The capability is supported by vendor. If the capability is not + * supported, {@link OH_ImageProcessing_Create} returns {@link IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING}. + * + * @see OH_ImageProcessing_Create + * @since 13 + */ +extern const int32_t IMAGE_PROCESSING_TYPE_METADATA_GENERATION; + +/** + * @brief Used to create an image processing instance for detail enhancement. + * + * Scale or resize images with the specified quality or just enhance details for rendering an image without changing + * its resolution. + * + * @see OH_ImageProcessing_Create + * @since 13 + */ +extern const int32_t IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER; + +/** + * @brief The key is used to specify the quality level for image detail enhancement. + * + * See {@link ImageDetailEnhancer_QualityLevel} for its value. + * Use {@link OH_ImageProcessing_SetParameter} to set the quality level. + * Use {@link OH_ImageProcessing_GetParameter} to get the current quality level. + * + * @see OH_VideoProcessing_SetParameter + * @see OH_VideoProcessing_GetParameter + * @since 13 + */ +extern const char* IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL; + +/** + * @brief The color space information is used for color space conversion capability query. + * + * @see OH_ImageProcessing_IsColorSpaceConversionSupported + * @see OH_ImageProcessing_IsCompositionSupported + * @see OH_ImageProcessing_IsDecompositionSupported + * @since 13 + */ +typedef struct ImageProcessing_ColorSpaceInfo { + /** define metadata type, {@link enum OH_Pixelmap_HdrMetadataKey} */ + int32_t metadataType; + /** define color space, {@link enum ColorSpaceName} */ + int32_t colorSpace; + /** define pixel format, {@link enum PIXEL_FORMAT} */ + int32_t pixelFormat; +} ImageProcessing_ColorSpaceInfo; + +/** + * @brief The quality level is used for detail enhancement. + * + * It is the value of the key parameter {@link IMAGE_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL}. + * + * @see OH_ImageProcessing_SetParameter + * @see OH_ImageProcessing_GetParameter + * @since 13 + */ +typedef enum ImageDetailEnhancer_QualityLevel { + /** No detail enhancement */ + IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_NONE, + /** A low level of detail enhancement quality but with a fast speed. It's the default level */ + IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_LOW, + /** A medium level of detail enhancement quality. Its speed is between the low setting and high setting */ + IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_MEDIUM, + /** A high level of detail enhancement quality but with a relatively slow speed */ + IMAGE_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH, +} ImageDetailEnhancer_QualityLevel; + +/** + * @brief Image processing error code. + * + * @since 13 + */ +typedef enum ImageProcessing_ErrorCode { + /** @error Operation is successful. */ + IMAGE_PROCESSING_SUCCESS, + /** @error Input parameter is invalid. This error is returned for all of the following error conditions: + * 1 - Invalid input or output image buffer - The image buffer is null. + * 2 - Invalid parameter - The parameter is null. + * 3 - Invalid type - The type passed in the create function does not exist. + */ + IMAGE_PROCESSING_ERROR_INVALID_PARAMETER = 401, + /** @error Some unknown error occurred, such as GPU calculation failure or memcpy failure. */ + IMAGE_PROCESSING_ERROR_UNKNOWN = 29200001, + /** @error The global environment initialization for image processing failed, such as failure to initialize + * the GPU environment. 
+ */ + IMAGE_PROCESSING_ERROR_INITIALIZE_FAILED, + /** @error Failed to create image processing instance. For example, + * the number of instances exceeds the upper limit. + */ + IMAGE_PROCESSING_ERROR_CREATE_FAILED, + /** @error Failed to process image buffer. For example, the processing times out. */ + IMAGE_PROCESSING_ERROR_PROCESS_FAILED, + /** @error The processing is not supported. You may call OH_ImageProcessing_IsXXXSupported + * to check whether the capability is supported. + */ + IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, + /** @error The operation is not permitted. This may be caused by incorrect status. */ + IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, + /** @error Out of memory. */ + IMAGE_PROCESSING_ERROR_NO_MEMORY, + /** @error The image processing instance is invalid. This may be caused by null instance. */ + IMAGE_PROCESSING_ERROR_INVALID_INSTANCE, + /** @error Input value is invalid. This error is returned for all of the following error conditions: + * 1 - Invalid input or output image buffer - The image buffer width(height) + * is too large or colorspace is incorrect. + * 2 - Invalid parameter - The parameter does not contain valid information, + * such as detail enhancer level is incorrect. + */ + IMAGE_PROCESSING_ERROR_INVALID_VALUE +} ImageProcessing_ErrorCode; + +#ifdef __cplusplus +} +#endif + +#endif // VIDEO_PROCESSING_ENGINE_C_API_IMAGE_PROCESSING_TYPES_H +/** @} */ diff --git a/interfaces/kits/c/video_processing.h b/interfaces/kits/c/video_processing.h new file mode 100644 index 0000000000000000000000000000000000000000..7c22782c6d01d5773649fc92243826d753ad495c --- /dev/null +++ b/interfaces/kits/c/video_processing.h @@ -0,0 +1,329 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * @addtogroup VideoProcessing + * @{ + * + * @brief Provide APIs for video quality processing. + * + * @since 12 + */ + +/** + * @file video_processing.h + * + * @brief Declare video processing functions. + * + * Provides SDR content processing for videos, including color space conversion, metadata generation + * and video scaling. + * + * @library libvideo_processing.so + * @syscap SystemCapability.Multimedia.VideoProcessingEngine + * @kit MediaKit + * @since 12 + */ + +#ifndef VIDEO_PROCESSING_ENGINE_C_API_VIDEO_PROCESSING_H +#define VIDEO_PROCESSING_ENGINE_C_API_VIDEO_PROCESSING_H + +#include +#include +#include "video_processing_types.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @brief Initialize global environment for video processing. + * + * This function is optional. \n + * Typically, this function is called once when the host process is started to initialize the global environment for + * video processing, which can reduce the time of {@link OH_VideoProcessing_Create}. \n + * To deinitialize global environment, call {@link OH_VideoProcessing_DeinitializeEnvironment}. + * + * @return {@link VIDEO_PROCESSING_SUCCESS} if initialization is successful. 
\n + * {@link VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED} if initialization is failed. \n + * You can check if the device GPU is working properly. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_InitializeEnvironment(void); + +/** + * @brief Deinitialize global environment for video processing. + * + * This function is required if {@link OH_VideoProcessing_InitializeEnvironment} is called. Typically, this + * function is called when the host process is about to exit to deinitialize the global environment, which is + * initialized by calling {@link OH_VideoProcessing_InitializeEnvironment}. \n + * If there is some video processing instance existing, this function should not be called. \n + * If the {@link OH_VideoProcessing_InitializeEnvironment} is not called, this function should not be called. + * + * @return {@link VIDEO_PROCESSING_SUCCESS} if deinitialization is successful. \n + * {@link VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED} if some video processing instance is not destroyed or + * {@link OH_VideoProcessing_InitializeEnvironment} is not called. \n + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_DeinitializeEnvironment(void); + +/** + * @brief Query if the video color space conversion is supported. + * + * @param sourceVideoInfo Source video color space information. + * @param destinationVideoInfo Destination video color space information. + * @return true if the video color space conversion is supported. \n + * false if the video color space conversion is not supported. + * @since 12 + */ +bool OH_VideoProcessing_IsColorSpaceConversionSupported( + const VideoProcessing_ColorSpaceInfo* sourceVideoInfo, + const VideoProcessing_ColorSpaceInfo* destinationVideoInfo); + +/** + * @brief Query if the video metadata generation is supported. + * + * @param sourceVideoInfo Source video color space information. + * @return true if the video metadata generation is supported. \n + * false if the video metadata generation is not supported. + * @since 12 + */ +bool OH_VideoProcessing_IsMetadataGenerationSupported( + const VideoProcessing_ColorSpaceInfo* sourceVideoInfo); + +/** + * @brief Create a video processing instance. + * + * @param videoProcessor Output parameter. The *videoProcessor points to a new video processing object. + * The *videoProcessor must be null before passed in. + * @param type Use VIDEO_PROCESSING_TYPE_XXX to specify the processing type. The processing type of the instance can not + * be changed. + * @return {@link VIDEO_PROCESSING_SUCCESS} if creating a video processing instance successfully. \n + * {@link VIDEO_PROCESSING_ERROR_UNSUPPORTED_PROCESSING} if the type is not supported. For example, if metadata + * generation is not supported by vendor, it returns unsupported processing. \n + * {@link VIDEO_PROCESSING_ERROR_CREATE_FAILED} if failed to create a video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or *instance is not null. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if type is invalid. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_Create(OH_VideoProcessing** videoProcessor, int type); + +/** + * @brief Destroy the video processing instance. + * + * Stop the instance before destroying it. see {@link OH_VideoProcessing_Stop}. \n + * + * @param videoProcessor The video processing instance pointer to be destroyed. It is recommended setting the + * instance pointer to null after the instance is destroyed. 
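+ * For example, one possible teardown order (a minimal illustration, assuming the instance is running):
+ * @code
+ * OH_VideoProcessing_Stop(videoProcessor);
+ * // ... wait until OH_VideoProcessingCallback_OnState reports VIDEO_PROCESSING_STATE_STOPPED ...
+ * OH_VideoProcessing_Destroy(videoProcessor);
+ * videoProcessor = NULL;
+ * @endcode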
+ * @return {@link VIDEO_PROCESSING_SUCCESS} if the instance is destroyed successfully . \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not a video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED} if the instance is still running. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_Destroy(OH_VideoProcessing* videoProcessor); + +/** + * @brief Register callback object. + * + * Register the callback object before starting video processing. + * + * @param videoProcessor A video processing instance pointer. + * @param callback Callback pointer to be registered. + * @param userData User's custom data pointer. + * @return {@link VIDEO_PROCESSING_SUCCESS} if callback is registered successfully. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not a video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if callback is null. \n + * {@link VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED} if video processing instance is running. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_RegisterCallback(OH_VideoProcessing* videoProcessor, + const VideoProcessing_Callback* callback, void* userData); + +/** + * @brief Set the output surface for video processing. + * + * Set the output surface before starting video processing. + * + * @param videoProcessor A video processing instance pointer. + * @param window The output surface pointer. + * @return {@link VIDEO_PROCESSING_SUCCESS} if setting output surface successfully. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not a video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if window is null. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_SetSurface(OH_VideoProcessing* videoProcessor, + const OHNativeWindow* window); + +/** + * @brief Create an input surface. + * + * Create the input surface before starting video processing. + * Call {@link OH_NativeWindow_DestroyNativeWindow} to destroy the input surface. + * + * @param videoProcessor A video processing instance pointer. + * @param window The input surface pointer. For example, it is the output surface of a video decoder. + * @return {@link VIDEO_PROCESSING_SUCCESS} if operation is successful. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not a video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if window is null or *window is not null. \n + * {@link VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED} if creating surface failed, input surface is already created + * or video processing instance is running. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_GetSurface(OH_VideoProcessing* videoProcessor, OHNativeWindow** window); + +/** + * @brief Set parameter for video processing. + * + * Add parameter identified by the specified parameter key. + * + * @param videoProcessor An video processing instance pointer. + * @param parameter The parameter for video processing. + * @return {@link VIDEO_PROCESSING_SUCCESS} if setting parameter is successful. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if the parameter is null. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_VALUE} if some property of the parameter is invalid. For example, the parameter + * contains unsupported parameter key or value. 
\n + * {@link VIDEO_PROCESSING_ERROR_NO_MEMORY} if memory allocation failed. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_SetParameter(OH_VideoProcessing* videoProcessor, + const OH_AVFormat* parameter); + +/** + * @brief Get parameter of video processing. + * + * Get parameter identified by the specified parameter key. + * + * @param videoProcessor An video processing instance pointer. + * @param parameter The parameter used by the video processing instance. + * @return {@link VIDEO_PROCESSING_SUCCESS} if getting parameter is successful. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not an video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if the parameter is null. \n + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_GetParameter(OH_VideoProcessing* videoProcessor, OH_AVFormat* parameter); + +/** + * @brief Start video processing instance. + * + * After successfully calling this function, the state {@link VIDEO_PROCESSING_STATE_RUNNING} is reported by callback + * function {@link OH_VideoProcessingCallback_OnState}. + * + * @param videoProcessor A video processing instance pointer. + * @return {@link VIDEO_PROCESSING_SUCCESS} if the operation is successful. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not a video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED} if output surface is not set, input surface is not created or + * instance is already running. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_Start(OH_VideoProcessing* videoProcessor); + +/** + * @brief To stop video processing instance. + * + * After the video processing instance is stopped successfully, the state {@link VIDEO_PROCESSING_STATE_STOPPED} is + * reported by callback function {@link OH_VideoProcessing_OnState}. + * + * @param videoProcessor A video processing instance pointer. + * @return {@link VIDEO_PROCESSING_SUCCESS} if the operation is successful. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not a video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED} if instance is already stopped. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_Stop(OH_VideoProcessing* videoProcessor); + +/** + * @brief Send the output buffer out. + * + * If the callback function {@link OH_VideoProcessingCallback_OnNewOutputBuffer} is set, the buffer's index is reported + * to user by the callback function when an output buffer is ready. + * + * @param videoProcessor A video processing instance pointer. + * @param index The output buffer's index. + * @return {@link VIDEO_PROCESSING_SUCCESS} if the operation is successful. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_INSTANCE} if instance is null or not a video processing instance. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if index is invalid. \n + * {@link VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED} if callback {@link OH_VideoProcessing_OnNewOutputBuffer} is + * not set or instance is stopped. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessing_RenderOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index); + +/** + * @brief Create a video processing callback object. + * + * @param callback Output parameter. The *callback points to a new callback object. The *callback should be null before + * creating the callback object. 
+ * @return {@link VIDEO_PROCESSING_SUCCESS} if callback object is created successfully. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if callback is null or *callback is not null. \n + * {@link VIDEO_PROCESSING_ERROR_NO_MEMORY} if out of memory. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessingCallback_Create(VideoProcessing_Callback** callback); + +/** + * @brief Destroy the callback object. + * + * The callback object can be destroyed after it is registered to video processing instance. + * + * @param callback The callback object pointer. It is recommended setting the callback pointer to null after the + * callback object is destroyed. + * @return {@link VIDEO_PROCESSING_SUCCESS} if callback is successfully destroyed. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if callback is null. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessingCallback_Destroy(VideoProcessing_Callback* callback); + +/** + * @brief Bind the {@link OH_VideoProcessingCallback_OnError} callback function to callback object. + * + * @param callback A callback object pointer. + * @param onError The callback function. + * @return {@link VIDEO_PROCESSING_SUCCESS} if the function is bound to callback object successfully. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if the callback is null or onError is null. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessingCallback_BindOnError(VideoProcessing_Callback* callback, + OH_VideoProcessingCallback_OnError onError); + +/** + * @brief Bind the {@link OH_VideoProcessingCallback_OnState} callback function to callback object. + * + * @param callback A callback object pointer. + * @param onState The callback function. + * @return {@link VIDEO_PROCESSING_SUCCESS} if the function is bound to callback object successfully. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if the callback is null or onState is null. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessingCallback_BindOnState(VideoProcessing_Callback* callback, + OH_VideoProcessingCallback_OnState onState); + +/** + * @brief Bind the {@link OH_VideoProcessingCallback_OnNewOutputBuffer} callback function to callback object. + * + * @param callback A callback object pointer. + * @param onNewOutputBuffer The callback function. + * @return {@link VIDEO_PROCESSING_SUCCESS} if the function is bound to callback object successfully. \n + * {@link VIDEO_PROCESSING_ERROR_INVALID_PARAMETER} if the callback is null. + * @since 12 + */ +VideoProcessing_ErrorCode OH_VideoProcessingCallback_BindOnNewOutputBuffer(VideoProcessing_Callback* callback, + OH_VideoProcessingCallback_OnNewOutputBuffer onNewOutputBuffer); + +#ifdef __cplusplus +} +#endif + +#endif // VIDEO_PROCESSING_ENGINE_C_API_VIDEO_PROCESSING_H +/** @} */ diff --git a/interfaces/kits/c/video_processing/BUILD.gn b/interfaces/kits/c/video_processing/BUILD.gn new file mode 100644 index 0000000000000000000000000000000000000000..0dc56d02583cefe8084b36cea30c3d8ef7455f0a --- /dev/null +++ b/interfaces/kits/c/video_processing/BUILD.gn @@ -0,0 +1,35 @@ +# Copyright (c) 2024 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import("//build/ohos.gni") +import("//build/ohos/ndk/ndk.gni") + +ohos_ndk_headers("video_processing_ndk_headers") { + dest_dir = "$ndk_headers_out_dir/multimedia/video_processing_engine" + sources = [ + "../video_processing.h", + "../video_processing_types.h", + ] +} + +ohos_ndk_library("libvideo_processing_ndk") { + ndk_description_file = "./libvideo_processing.ndk.json" + output_name = "video_processing" + output_extension = "so" + min_compact_version = "12" + system_capability = "SystemCapability.Multimedia.VideoProcessingEngine" + system_capability_headers = [ + "multimedia/video_processing_engine/video_processing_types.h", + "multimedia/video_processing_engine/video_processing.h", + ] +} diff --git a/interfaces/kits/c/video_processing/libvideo_processing.ndk.json b/interfaces/kits/c/video_processing/libvideo_processing.ndk.json new file mode 100644 index 0000000000000000000000000000000000000000..19fc6e2cc0dfa42b931163299b783ed0a34a8248 --- /dev/null +++ b/interfaces/kits/c/video_processing/libvideo_processing.ndk.json @@ -0,0 +1,98 @@ +[ + { + "first_introduced": "12", + "name": "OH_VideoProcessing_InitializeEnvironment" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_DeinitializeEnvironment" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_IsColorSpaceConversionSupported" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_IsMetadataGenerationSupported" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_Create" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_Destroy" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_RegisterCallback" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_SetSurface" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_GetSurface" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_SetParameter" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_GetParameter" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_Start" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_Stop" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessing_RenderOutputBuffer" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessingCallback_Create" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessingCallback_Destroy" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessingCallback_BindOnError" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessingCallback_BindOnState" + }, + { + "first_introduced": "12", + "name": "OH_VideoProcessingCallback_BindOnNewOutputBuffer" + }, + { + "first_introduced": "12", + "name": "VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION", + "type": "variable" + }, + { + "first_introduced": "12", + "name": "VIDEO_PROCESSING_TYPE_METADATA_GENERATION", + "type": "variable" + }, + { + "first_introduced": "12", + "name": "VIDEO_PROCESSING_TYPE_DETAIL_ENHANCER", + "type": "variable" + }, + { + "first_introduced": "12", + "name": "VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL", + "type": "variable" + } +] 
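A usage note on the symbols exported above: the sketch below chains the video processing NDK calls into one plausible color space conversion flow. It is illustrative only; RunColorSpaceConversion, OnError and OnState are hypothetical application-side names, the output window is assumed to come from the application's display component (for example an XComponent surface), the include path is assumed from the NDK header install directory, and the wait for the stopped state is elided.

#include <multimedia/video_processing_engine/video_processing.h>   // path assumed

// Hypothetical application-side callbacks matching the typedefs in video_processing_types.h.
static void OnError(OH_VideoProcessing* processor, VideoProcessing_ErrorCode error, void* userData)
{
    (void)processor; (void)error; (void)userData;   // report or log the error here
}

static void OnState(OH_VideoProcessing* processor, VideoProcessing_State state, void* userData)
{
    (void)processor; (void)state; (void)userData;   // track RUNNING/STOPPED transitions here
}

// outputWindow: where processed frames are rendered; *inputWindow: handed to the producer (e.g. a decoder).
static VideoProcessing_ErrorCode RunColorSpaceConversion(OHNativeWindow* outputWindow,
    OHNativeWindow** inputWindow, void* userData)
{
    OH_VideoProcessing_InitializeEnvironment();     // optional, shortens OH_VideoProcessing_Create()

    OH_VideoProcessing* processor = NULL;
    VideoProcessing_ErrorCode ret =
        OH_VideoProcessing_Create(&processor, VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION);
    if (ret != VIDEO_PROCESSING_SUCCESS) {
        return ret;
    }

    VideoProcessing_Callback* callback = NULL;
    OH_VideoProcessingCallback_Create(&callback);
    OH_VideoProcessingCallback_BindOnError(callback, OnError);
    OH_VideoProcessingCallback_BindOnState(callback, OnState);
    OH_VideoProcessing_RegisterCallback(processor, callback, userData);
    OH_VideoProcessingCallback_Destroy(callback);   // documented as safe once registered

    OH_VideoProcessing_SetSurface(processor, outputWindow);  // destination for processed frames
    OH_VideoProcessing_GetSurface(processor, inputWindow);   // feed this surface from the producer

    ret = OH_VideoProcessing_Start(processor);
    // ... frames flow while running; later call OH_VideoProcessing_Stop(), wait for the stopped state
    // in OnState, then OH_VideoProcessing_Destroy() and OH_VideoProcessing_DeinitializeEnvironment().
    return ret;
}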
diff --git a/interfaces/kits/c/video_processing_types.h b/interfaces/kits/c/video_processing_types.h new file mode 100644 index 0000000000000000000000000000000000000000..d863e2cf37bcc7c1a1bf3c3dcd78bccf7d1c6d7a --- /dev/null +++ b/interfaces/kits/c/video_processing_types.h @@ -0,0 +1,278 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * @addtogroup VideoProcessing + * @{ + * + * @brief Provide video processing including color space conversion and metadata generation. + * + * @since 12 + */ + +/** + * @file video_processing_types.h + * + * @brief Type definitions for video processing. + * + * @library libvideo_processing.so + * @syscap SystemCapability.Multimedia.VideoProcessingEngine + * @kit MediaKit + * @since 12 + */ + +#ifndef VIDEO_PROCESSING_ENGINE_C_API_VIDEO_PROCESSING_TYPES_H +#define VIDEO_PROCESSING_ENGINE_C_API_VIDEO_PROCESSING_TYPES_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @brief Define the video processing object. + * + * Define a null pointer of OH_VideoProcessing and call {@link OH_VideoProcessing_Create} to create a video processing + * instance. The pointer should be null before creating instance. + * User can create multiple video processing instances for different processing types. + * + * @since 12 + */ +typedef struct OH_VideoProcessing OH_VideoProcessing; + +/** + * @brief Forward declaration of NativeWindow. + * + * @since 12 + */ +typedef struct NativeWindow OHNativeWindow; + +/** + * @brief Forward declaration of OH_AVFormat. + * + * @since 12 + */ +typedef struct OH_AVFormat OH_AVFormat; + +/** + * @brief Used to create a video processing instance for color space conversion. + * + * Some capabilities are supported by vendor. Use {@link OH_VideoProcessing_IsColorSpaceConversionSupported} to query if + * the conversion is supported. + * + * @see OH_VideoProcessing_Create + * @since 12 + */ +extern const int32_t VIDEO_PROCESSING_TYPE_COLOR_SPACE_CONVERSION; + +/** + * @brief Used to create a video processing instance for metadata generation. + * + * Generate HDR vivid metadata for video. The capability is supported by vendor. If the capability is not supported, + * {@link OH_VideoProcessing_Create} returns {@link VIDEO_PROCESSING_ERROR_UNSUPPORTED_PROCESSING}. + * + * @see OH_VideoProcessing_Create + * @since 12 + */ +extern const int32_t VIDEO_PROCESSING_TYPE_METADATA_GENERATION; + +/** + * @brief Used to create an video processing instance of detail enhancement. + * + * Scale or resize video with the specified quality or just enhance details for rendering without changing its + * resolution. + * + * @see OH_ImageProcessing_Create + * @since 12 + */ +extern const int32_t VIDEO_PROCESSING_TYPE_DETAIL_ENHANCER; + +/** + * @brief The key is used to specify the quality level for video detail enhancement. + * + * See {@link VideoDetailEnhancer_QualityLevel} for its values. + * Use {@link OH_VideoProcessing_SetParameter} to set the quality level. 
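+ * For example (a minimal illustration; the instance is assumed to be created with
+ * {@link VIDEO_PROCESSING_TYPE_DETAIL_ENHANCER}):
+ * @code
+ * OH_AVFormat* parameter = OH_AVFormat_Create();
+ * OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL,
+ *     VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH);
+ * OH_VideoProcessing_SetParameter(videoProcessor, parameter);
+ * OH_AVFormat_Destroy(parameter);
+ * @endcode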
+ * Use {@link OH_VideoProcessing_GetParameter} to get the current quality level.
+ *
+ * @see OH_VideoProcessing_SetParameter
+ * @see OH_VideoProcessing_GetParameter
+ * @since 12
+ */
+extern const char* VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL;
+
+/**
+ * @brief Video color space information structure used to query whether a video color space conversion is supported.
+ *
+ * @see OH_VideoProcessing_IsColorSpaceConversionSupported
+ * @since 12
+ */
+typedef struct VideoProcessing_ColorSpaceInfo {
+    /** The metadata type of the video, see {@link enum OH_NativeBuffer_MetadataType} */
+    int32_t metadataType;
+    /** The color space type of the video, see {@link enum OH_NativeBuffer_ColorSpace} */
+    int32_t colorSpace;
+    /** The pixel format of the video, see {@link enum OH_NativeBuffer_Format} */
+    int32_t pixelFormat;
+} VideoProcessing_ColorSpaceInfo;
+
+/**
+ * @brief The quality level used for detail enhancement.
+ *
+ * It is the value of the parameter key {@link VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL}.
+ *
+ * @see OH_VideoProcessing_SetParameter
+ * @see OH_VideoProcessing_GetParameter
+ * @since 12
+ */
+typedef enum VideoDetailEnhancer_QualityLevel {
+    /** No detail enhancement */
+    VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_NONE,
+    /** A low level of detail enhancement quality with a fast speed. It is the default level */
+    VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_LOW,
+    /** A medium level of detail enhancement quality. Its speed falls between the low and high levels */
+    VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_MEDIUM,
+    /** A high level of detail enhancement quality with a relatively slow speed */
+    VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH,
+} VideoDetailEnhancer_QualityLevel;
+
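As a side note for readers of this header, a minimal sketch of how the quality level is applied through the key above: OH_AVFormat and its setters come from media_foundation's native_avformat.h, and the exact OH_VideoProcessing_SetParameter prototype is assumed from video_processing.h, which is not part of this hunk.

// Sketch only: the SetParameter prototype is an assumption; the key and enum are defined above.
#include <multimedia/player_framework/native_avformat.h>
#include <multimedia/video_processing_engine/video_processing.h>
#include <multimedia/video_processing_engine/video_processing_types.h>

static VideoProcessing_ErrorCode SetHighQuality(OH_VideoProcessing* detailEnhancer)
{
    OH_AVFormat* parameter = OH_AVFormat_Create();
    if (parameter == NULL) {
        return VIDEO_PROCESSING_ERROR_NO_MEMORY;
    }
    // Store the desired quality level under the documented key, then hand it to the processor.
    OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL,
        VIDEO_DETAIL_ENHANCER_QUALITY_LEVEL_HIGH);
    VideoProcessing_ErrorCode ret = OH_VideoProcessing_SetParameter(detailEnhancer, parameter);
    OH_AVFormat_Destroy(parameter);
    return ret;
}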
+/**
+ * @brief Video processing error code.
+ *
+ * @since 12
+ */
+typedef enum VideoProcessing_ErrorCode {
+    /** @error Operation is successful. */
+    VIDEO_PROCESSING_SUCCESS,
+    /** @error Input parameter is invalid. This error is returned for all of the following error conditions:
+     *  1 - Invalid input or output video buffer - The video buffer is null.
+     *  2 - Invalid parameter - The parameter is null.
+     *  3 - Invalid type - The type passed in the create function does not exist.
+     */
+    VIDEO_PROCESSING_ERROR_INVALID_PARAMETER = 401,
+    /** @error Some unknown error occurred, such as a GPU calculation failure or a memcpy failure. */
+    VIDEO_PROCESSING_ERROR_UNKNOWN = 29210001,
+    /** @error The global environment initialization for video processing failed, for example, because the GPU
+     *  environment could not be initialized.
+     */
+    VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED,
+    /** @error Failed to create a video processing instance. For example,
+     *  the number of instances exceeds the upper limit.
+     */
+    VIDEO_PROCESSING_ERROR_CREATE_FAILED,
+    /** @error Failed to process the video buffer. For example, the processing times out. */
+    VIDEO_PROCESSING_ERROR_PROCESS_FAILED,
+    /** @error The processing is not supported. You may call OH_VideoProcessing_IsXXXSupported
+     *  to check whether the capability is supported.
+     */
+    VIDEO_PROCESSING_ERROR_UNSUPPORTED_PROCESSING,
+    /** @error The operation is not permitted. This may be caused by an incorrect state. */
+    VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED,
+    /** @error Out of memory. */
+    VIDEO_PROCESSING_ERROR_NO_MEMORY,
+    /** @error The video processing instance is invalid. This may be caused by a null instance. */
+    VIDEO_PROCESSING_ERROR_INVALID_INSTANCE,
+    /** @error Input value is invalid. This error is returned for all of the following error conditions:
+     *  1 - Invalid input or output video buffer - The video buffer width (or height)
+     *      is too large or the color space is incorrect.
+     *  2 - Invalid parameter - The parameter does not contain valid information,
+     *      for example, the detail enhancer quality level is incorrect.
+     */
+    VIDEO_PROCESSING_ERROR_INVALID_VALUE
+} VideoProcessing_ErrorCode;
+
+/**
+ * @brief Video processing states.
+ *
+ * The state is reported to the user by the callback function {@link OH_VideoProcessingCallback_OnState}.
+ *
+ * @since 12
+ */
+typedef enum VideoProcessing_State {
+    /** Video processing is running */
+    VIDEO_PROCESSING_STATE_RUNNING,
+    /** Video processing is stopped */
+    VIDEO_PROCESSING_STATE_STOPPED
+} VideoProcessing_State;
+
+/**
+ * @brief Video processing asynchronous callback object type.
+ *
+ * Define a null pointer of VideoProcessing_Callback and call {@link OH_VideoProcessingCallback_Create} to create a
+ * callback object. The pointer should be null before the callback object is created.
+ * Register the callback with a video processing instance by calling {@link OH_VideoProcessing_RegisterCallback}.
+ *
+ * @since 12
+ */
+typedef struct VideoProcessing_Callback VideoProcessing_Callback;
+
+/**
+ * @brief The callback function pointer definition for reporting errors during video processing.
+ *
+ * Errors: \n
+ * {@link VIDEO_PROCESSING_ERROR_UNSUPPORTED_PROCESSING}, the processing is not supported. For example, the
+ * color space conversion according to the source and destination videos' properties is not supported. \n
+ * {@link VIDEO_PROCESSING_ERROR_INVALID_VALUE}, some property of the video is invalid. For example, the color space of
+ * the video is invalid. \n
+ * {@link VIDEO_PROCESSING_ERROR_NO_MEMORY}, out of memory. \n
+ * {@link VIDEO_PROCESSING_ERROR_PROCESS_FAILED}, some processing error occurs. \n
+ * For more errors, see {@link VideoProcessing_ErrorCode}.
+ *
+ * @param videoProcessor The video processing instance.
+ * @param error The error code reported to the user.
+ * @param userData User's custom data.
+ * @since 12
+ */
+typedef void (*OH_VideoProcessingCallback_OnError)(OH_VideoProcessing* videoProcessor,
+    VideoProcessing_ErrorCode error, void* userData);
+
+/**
+ * @brief The callback function pointer definition for reporting the video processing state.
+ *
+ * The state will be {@link VIDEO_PROCESSING_STATE_RUNNING} after {@link OH_VideoProcessing_Start} is called
+ * successfully.
+ * The state will be {@link VIDEO_PROCESSING_STATE_STOPPED} after all the buffers cached before
+ * {@link OH_VideoProcessing_Stop} is called have been processed.
+ *
+ * @param videoProcessor The video processing instance.
+ * @param state The current state; see {@link VideoProcessing_State}.
+ * @param userData User's custom data.
+ * @since 12
+ */
+typedef void (*OH_VideoProcessingCallback_OnState)(OH_VideoProcessing* videoProcessor, VideoProcessing_State state,
+    void* userData);
+
+/**
+ * @brief The callback function pointer definition for reporting that a new output buffer has been filled with processed data.
+ *
+ * Every new output buffer's index is reported to the user once the buffer is filled with processed data. Call
+ * {@link OH_VideoProcessing_RenderOutputBuffer} with the buffer's index to send the output buffer out.
+ * If this function is not registered, the output buffer is sent out as soon as it is filled with processed
+ * data, without reporting.
+ *
+ * @param videoProcessor The video processing instance.
+ * @param index The index of the new output buffer.
+ * @param userData The user's custom data.
+ * @since 12
+ */
+typedef void (*OH_VideoProcessingCallback_OnNewOutputBuffer)(OH_VideoProcessing* videoProcessor, uint32_t index,
+    void* userData);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // VIDEO_PROCESSING_ENGINE_C_API_VIDEO_PROCESSING_TYPES_H
+/** @} */
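Because the callback typedefs above are the contract clients code against, here is a minimal registration sketch. The handler signatures match the typedefs in this header; the OH_VideoProcessingCallback_Create/Destroy, Bind*, and RegisterCallback prototypes are assumed from video_processing.h, which is not shown in this diff.

// Sketch only: Bind*/RegisterCallback prototypes are assumptions; the handler signatures match the typedefs above.
#include <multimedia/video_processing_engine/video_processing.h>
#include <multimedia/video_processing_engine/video_processing_types.h>

static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData)
{
    (void)videoProcessor;
    (void)userData;
    (void)error; // Log or surface the error; see VideoProcessing_ErrorCode for possible values.
}

static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData)
{
    (void)videoProcessor;
    (void)userData;
    (void)state; // RUNNING after Start(); STOPPED once buffers cached before Stop() are drained.
}

static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData)
{
    (void)userData;
    // Send the processed buffer out; if this handler is not bound, buffers are rendered automatically.
    OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index);
}

static VideoProcessing_ErrorCode AttachCallbacks(OH_VideoProcessing* processor, void* userData)
{
    VideoProcessing_Callback* callback = NULL; // must be null before Create
    VideoProcessing_ErrorCode ret = OH_VideoProcessingCallback_Create(&callback);
    if (ret != VIDEO_PROCESSING_SUCCESS) {
        return ret;
    }
    OH_VideoProcessingCallback_BindOnError(callback, OnError);
    OH_VideoProcessingCallback_BindOnState(callback, OnState);
    OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer);
    ret = OH_VideoProcessing_RegisterCallback(processor, callback, userData);
    // Assumption: the callback object is no longer needed by the caller once registered.
    OH_VideoProcessingCallback_Destroy(callback);
    return ret;
}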
diff --git a/patches/patches.json b/patches/patches.json
new file mode 100644
index 0000000000000000000000000000000000000000..92a2114498d704e7953481cc1835507e25656671
--- /dev/null
+++ b/patches/patches.json
@@ -0,0 +1,14 @@
+{
+  "patches": [
+    {
+      "project":"multimedia_media_foundation",
+      "path":"foundation/multimedia/media_foundation",
+      "pr_url":"https://gitee.com/openharmony/multimedia_media_foundation/pulls/1897"
+    },
+    {
+      "project":"vendor_hihope",
+      "path":"vendor/hihope",
+      "pr_url":"https://gitee.com/openharmony/vendor_hihope/pulls/1420"
+    }
+  ]
+}
\ No newline at end of file
diff --git a/services/BUILD.gn b/services/BUILD.gn
index 81d6ac7ac091da735f67ea1ad5ed544a338f85f6..44fcb956e807b39d71184c55c55a877c2dbf3ece
--- a/services/BUILD.gn
+++ b/services/BUILD.gn
@@ -13,7 +13,7 @@
 import("//build/config/ohos/config.gni")
 import("//build/ohos.gni")
-import("//foundation/ability/idl_tool/idl_config.gni")
+import("//build/config/components/idl_tool/idl.gni")
 import("//foundation/multimedia/video_processing_engine/config.gni")
 
 group("video_processing_service_group") {
diff --git a/test/moduletest/colorspace_converter_video/api_test.cpp b/test/moduletest/colorspace_converter_video/api_test.cpp
index 7657125730769c4cb35fb97a1b6d6f1c970bdaa3..36336aba223a7dc0a227fef4373d9c2fa55e7ac7
--- a/test/moduletest/colorspace_converter_video/api_test.cpp
+++ b/test/moduletest/colorspace_converter_video/api_test.cpp
@@ -393,6 +393,7 @@ HWTEST_F(CSCVInnerApiTest, CSCV_API_0061, TestSize.Level2)
     ASSERT_EQ(err, GSERROR_OK);
     ret = cscv_->NotifyEos();
     ASSERT_EQ(ret, VPE_ALGO_ERR_OK);
+    cout << "begin to call cscv2->Release()" << endl;
     cscv2->Release();
 }
diff --git a/test/ndk/BUILD.gn b/test/ndk/BUILD.gn
index 07807777f20647c822801f41424419be5a0ba8fe..529184e0269129e8317611ca13c97d846a25bec1
--- a/test/ndk/BUILD.gn
+++ b/test/ndk/BUILD.gn
@@ -12,7 +12,6 @@ # limitations under the License.
import("//build/ohos.gni") -import("//foundation/multimedia/media_foundation/video_processing_engine/config.gni") group("vpe_module_test") { testonly = true diff --git a/test/ndk/moduletest/video/BUILD.gn b/test/ndk/moduletest/video/BUILD.gn index 0ecdc806af717a6f127af1c2c64c22be74f423b5..7c907cda74d5334435749db5aa2af9102933d7d6 100644 --- a/test/ndk/moduletest/video/BUILD.gn +++ b/test/ndk/moduletest/video/BUILD.gn @@ -13,7 +13,6 @@ import("//build/test.gni") import("//foundation/multimedia/media_foundation/config.gni") -import("//foundation/multimedia/media_foundation/video_processing_engine/config.gni") video_moduletest_native_include_dirs = [ "$vpe_interface_capi_dir", diff --git a/test/unittest/colorSpace_converter_video_ndk/BUILD.gn b/test/unittest/colorSpace_converter_video_ndk/BUILD.gn index 36a20ad2f90440bf8bc55133b140126c74c80e17..63e365bd324efb6a1fd8ee7328140146a02444ab 100644 --- a/test/unittest/colorSpace_converter_video_ndk/BUILD.gn +++ b/test/unittest/colorSpace_converter_video_ndk/BUILD.gn @@ -41,13 +41,16 @@ ohos_unittest("colorSpace_converter_video_ndk_unit_test") { "$FRAMEWORK_DIR/dfx/include/", "$FRAMEWORK_DIR/capi/video_processing/include/", "$COLORSPACE_CONVERTER_VIDEO_DIR/include/", + "//foundation/multimedia/media_foundation/interface/kits/c/", + "//foundation/graphic/graphic_2d/interfaces/inner_api/", + "//foundation/graphic/graphic_2d_ext/ohcore/graphic_compat_layer/include/utils/", ] sources = [ "colorSpace_converter_video_ndk_unit_test.cpp" ] deps = [ "$FRAMEWORK_DIR:videoprocessingengine", - "$FRAMEWORK_DIR:video_processing_capi_impl", + "$FRAMEWORK_DIR:video_processing", ] external_deps = [ @@ -59,9 +62,7 @@ ohos_unittest("colorSpace_converter_video_ndk_unit_test") { "graphic_2d:librender_service_client", "hilog:libhilog", "hitrace:hitrace_meter", - "media_foundation:media_foundation", "media_foundation:native_media_core", - "media_foundation:video_processing", "ipc:ipc_core", "memory_utils:libdmabufheap", ] diff --git a/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp b/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp index ed67092d61d059175459bdb5f8fe825ea80ff090..b9bdd108c79a5ff6dc511282d650f3aa38eee7a5 100644 --- a/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp +++ b/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp @@ -561,6 +561,8 @@ HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, codeCoverageComponent_20, Test SetMeatadata(buffer, (uint32_t)inMetaType_); window->surface->FlushBuffer(buffer, fence_, flushCfg_); OH_VideoProcessing_SetSurface(instance, window2); + int sleepTime = 2; + sleep(sleepTime); OH_VideoProcessing_Destroy(instance); OH_VideoProcessing_Destroy(instance2); OH_VideoProcessing_DeinitializeEnvironment(); diff --git a/test/unittest/detail_enhancer_video_ndk/BUILD.gn b/test/unittest/detail_enhancer_video_ndk/BUILD.gn index 28b8a50fc9842b56b5afad41de83868363d387eb..57e372718b7055305aa5b1cc2430d50de66c60ca 100644 --- a/test/unittest/detail_enhancer_video_ndk/BUILD.gn +++ b/test/unittest/detail_enhancer_video_ndk/BUILD.gn @@ -36,15 +36,14 @@ ohos_unittest("detail_enhancer_video_ndk_unit_test") { "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/../media_foundation/interface/kits/c", "$ALGORITHM_DIR/detail_enhancer_video/include", "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", - "//foundation/multimedia/media_foundation/video_processing_engine/interface/kits/c", ] sources = [ 
"detail_enhancer_video_ndk_unit_test.cpp" ] deps = [ "$FRAMEWORK_DIR:videoprocessingengine", - "//foundation/multimedia/media_foundation/video_processing_engine/framework:video_processing", - "//foundation/multimedia/media_foundation/video_processing_engine/interface/kits/c/video_processing:libvideo_processing_ndk", + "$FRAMEWORK_DIR:video_processing", + "//foundation/multimedia/video_processing_engine/interfaces/kits/c/video_processing:libvideo_processing_ndk", ] external_deps = [ diff --git a/test/unittest/image_processing/BUILD.gn b/test/unittest/image_processing/BUILD.gn index d49db4729c47a6ca3ba07da49ad01d127a220da6..1d0b48b84383f88200250d76c6ad17cc468508f5 100644 --- a/test/unittest/image_processing/BUILD.gn +++ b/test/unittest/image_processing/BUILD.gn @@ -43,7 +43,7 @@ ohos_unittest("image_processing_unit_test") { sources = [ "image_processing_unit_test.cpp" ] deps = [ - + "$FRAMEWORK_DIR:image_processing", ] external_deps = [ @@ -53,7 +53,6 @@ ohos_unittest("image_processing_unit_test") { "hilog:libhilog", "hitrace:hitrace_meter", "media_foundation:native_media_core", - "media_foundation:image_processing", "image_framework:pixelmap", ] diff --git a/test/unittest/metadata_gen_video_ndk/BUILD.gn b/test/unittest/metadata_gen_video_ndk/BUILD.gn index ff36419e274904d1437feb4995ac8fc52f872467..e0af4450e7081c04ee95be12c5fd8cd05c857286 100644 --- a/test/unittest/metadata_gen_video_ndk/BUILD.gn +++ b/test/unittest/metadata_gen_video_ndk/BUILD.gn @@ -38,13 +38,16 @@ ohos_unittest("metadata_gen_video_ndk_unit_test") { "$ALGORITHM_DIR/colorspace_converter_video/include", "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/include", "$TEST_UTILS_PATH/ColorSpaceConverter/sample/video", + "//foundation/multimedia/media_foundation/interface/kits/c/", + "//foundation/graphic/graphic_2d/interfaces/inner_api/", + "//foundation/graphic/graphic_2d_ext/ohcore/graphic_compat_layer/include/utils/", ] sources = [ "metadata_gen_video_ndk_unit_test.cpp" ] deps = [ "$FRAMEWORK_DIR:videoprocessingengine", - "$FRAMEWORK_DIR:video_processing_capi_impl", + "$FRAMEWORK_DIR:video_processing", ] external_deps = [ @@ -58,7 +61,6 @@ ohos_unittest("metadata_gen_video_ndk_unit_test") { "hitrace:hitrace_meter", "media_foundation:media_foundation", "media_foundation:native_media_core", - "media_foundation:video_processing", "ipc:ipc_core", "memory_utils:libdmabufheap", ] diff --git a/test/unittest/video_variable_refreshrate_test/BUILD.gn b/test/unittest/video_variable_refreshrate_test/BUILD.gn index ec600dd295de5a1765f71239ca99ffb0f55e0df9..8b75dca4faeab672dee275fcdc0d30a31ec35a0f 100644 --- a/test/unittest/video_variable_refreshrate_test/BUILD.gn +++ b/test/unittest/video_variable_refreshrate_test/BUILD.gn @@ -45,7 +45,7 @@ ohos_unittest("video_variable_refreshrate_unit_test") { "drivers_interface_display:display_commontype_idl_headers", "drivers_interface_display:display_composer_idl_headers_1.2", "drivers_interface_display:libdisplay_commontype_proxy_2.0", - "drivers_interface_display:libdisplay_composer_hdi_impl", + "drivers_interface_display:libdisplay_composer_hdi_impl_1.2", "graphic_surface:surface", "hilog:libhilog", "hitrace:hitrace_meter",