From 11225b603cc796ced49c08d6c0b322770fdeb5a5 Mon Sep 17 00:00:00 2001 From: lvxiaoqiang Date: Wed, 24 Apr 2024 14:11:44 +0800 Subject: [PATCH 1/8] add profession mode Signed-off-by: lvxiaoqiang Change-Id: I8aee9fb833de2a56fc23db5a030b3f442cd505d4 --- .../src/input/camera_manager_napi.cpp | 51 +- .../src/mode/mode_manager_napi.cpp | 7 - .../src/mode/portrait_session_napi.cpp | 2 +- .../src/mode/profession_session_napi.cpp | 1549 ++++++++++++++++ .../src/native_module_ohos_camera.cpp | 1 + .../js/camera_napi/src/output/photo_napi.cpp | 63 +- .../src/output/photo_output_napi.cpp | 179 +- .../src/session/camera_session_napi.cpp | 207 ++- frameworks/native/camera/BUILD.gn | 2 + .../native/camera/src/input/camera_device.cpp | 4 +- .../native/camera/src/input/camera_info.cpp | 4 +- .../camera/src/input/camera_manager.cpp | 35 +- .../native/camera/src/output/photo_output.cpp | 12 + .../camera/src/session/capture_session.cpp | 227 ++- .../camera/src/session/portrait_session.cpp | 10 +- .../camera/src/session/profession_session.cpp | 1558 +++++++++++++++++ .../include/camera_framework_moduletest.h | 8 + .../src/camera_framework_moduletest.cpp | 479 +++++ .../camera/include/input/camera_manager.h | 1 + .../include/output/camera_output_capability.h | 8 +- .../camera/include/output/photo_output.h | 11 +- .../include/session/capture_scene_const.h | 13 + .../camera/include/session/capture_session.h | 73 +- .../camera/include/session/portrait_session.h | 1 - .../include/session/profession_session.h | 559 ++++++ interfaces/inner_api/native/test/BUILD.gn | 36 + .../native/test/camera_capture_mode.cpp | 3 - .../native/test/camera_capture_profession.cpp | 382 ++++ .../camera_napi/@ohos.multimedia.camera.d.ts | 26 +- interfaces/kits/js/camera_napi/BUILD.gn | 1 + .../include/input/camera_info_napi.h | 3 +- .../camera_napi/include/input/camera_napi.h | 30 +- .../js/camera_napi/include/listener_base.h | 2 +- .../include/mode/profession_session_napi.h | 191 ++ 
.../include/native_module_ohos_camera.h | 1 + .../camera_napi/include/output/photo_napi.h | 6 +- .../include/output/photo_output_napi.h | 29 + .../include/session/camera_session_napi.h | 56 +- .../kits/native/include/camera/camera.h | 7 +- .../camera_service_ipc_interface_code.h | 3 +- .../binder/base/include/istream_capture.h | 4 +- .../client/include/hstream_capture_proxy.h | 4 +- .../client/src/hstream_capture_proxy.cpp | 22 + .../server/include/hstream_capture_stub.h | 1 + .../server/src/hstream_capture_stub.cpp | 14 + .../camera_service/include/hcamera_service.h | 12 +- .../camera_service/include/hstream_capture.h | 2 + .../camera_service/src/hcamera_service.cpp | 49 +- .../camera_service/src/hstream_capture.cpp | 28 +- 49 files changed, 5809 insertions(+), 167 deletions(-) create mode 100644 frameworks/js/camera_napi/src/mode/profession_session_napi.cpp create mode 100644 frameworks/native/camera/src/session/profession_session.cpp create mode 100644 interfaces/inner_api/native/camera/include/session/profession_session.h create mode 100644 interfaces/inner_api/native/test/camera_capture_profession.cpp create mode 100644 interfaces/kits/js/camera_napi/include/mode/profession_session_napi.h diff --git a/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp b/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp index 265d93784..01309a1af 100644 --- a/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp +++ b/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp @@ -31,6 +31,7 @@ #include "mode/photo_session_napi.h" #include "mode/photo_session_for_sys_napi.h" #include "mode/portrait_session_napi.h" +#include "mode/profession_session_napi.h" #include "mode/video_session_napi.h" #include "mode/video_session_for_sys_napi.h" namespace OHOS { @@ -477,13 +478,6 @@ napi_value CameraManagerNapi::CreateCameraSessionInstance(napi_env env, napi_cal return result; } -enum JsSceneMode { - JS_CAPTURE = 1, - JS_VIDEO = 2, - JS_PORTRAIT = 3, - JS_NIGHT 
= 4, -}; - napi_value CameraManagerNapi::CreateSessionInstance(napi_env env, napi_callback_info info) { MEDIA_INFO_LOG("CreateSessionInstance is called"); @@ -508,20 +502,26 @@ napi_value CameraManagerNapi::CreateSessionInstance(napi_env env, napi_callback_ napi_get_value_int32(env, argv[PARAM0], &jsModeName); MEDIA_INFO_LOG("CameraManagerNapi::CreateSessionInstance mode = %{public}d", jsModeName); switch (jsModeName) { - case JS_CAPTURE: + case SceneMode::CAPTURE: result = CameraNapiSecurity::CheckSystemApp(env, false) ? PhotoSessionForSysNapi::CreateCameraSession(env) : PhotoSessionNapi::CreateCameraSession(env); break; - case JS_VIDEO: + case SceneMode::VIDEO: result = CameraNapiSecurity::CheckSystemApp(env, false) ? VideoSessionForSysNapi::CreateCameraSession(env) : VideoSessionNapi::CreateCameraSession(env); break; - case JS_PORTRAIT: + case SceneMode::PORTRAIT: result = PortraitSessionNapi::CreateCameraSession(env); break; - case JS_NIGHT: + case SceneMode::NIGHT: result = NightSessionNapi::CreateCameraSession(env); break; + case SceneMode::PROFESSIONAL_PHOTO: + result = ProfessionSessionNapi::CreateCameraSession(env, SceneMode::PROFESSIONAL_PHOTO); + break; + case SceneMode::PROFESSIONAL_VIDEO: + result = ProfessionSessionNapi::CreateCameraSession(env, SceneMode::PROFESSIONAL_VIDEO); + break; default: MEDIA_ERR_LOG("CameraManagerNapi::CreateSessionInstance mode = %{public}d not supported", jsModeName); break; @@ -885,7 +885,18 @@ napi_value CameraManagerNapi::GetSupportedCameras(napi_env env, napi_callback_in status = napi_unwrap(env, thisVar, reinterpret_cast(&cameraManagerNapi)); if (status == napi_ok && cameraManagerNapi != nullptr) { std::vector> cameraObjList = cameraManagerNapi->cameraManager_->GetSupportedCameras(); - result = CreateCameraJSArray(env, status, cameraObjList); + std::vector> selectedCameraList; + if (!CameraNapiSecurity::CheckSystemApp(env)) { + std::copy_if(cameraObjList.begin(), cameraObjList.end(), + 
std::back_inserter(selectedCameraList), [](const auto& it) { + return it->GetCameraType() == CAMERA_TYPE_UNSUPPORTED || it->GetCameraType() == CAMERA_TYPE_DEFAULT; + }); + MEDIA_DEBUG_LOG("CameraManagerNapi::GetSupportedCameras size=[%{public}zu]", selectedCameraList.size()); + result = CreateCameraJSArray(env, status, selectedCameraList); + } else { + result = CreateCameraJSArray(env, status, cameraObjList); + MEDIA_DEBUG_LOG("CameraManagerNapi::GetSupportedCameras size=[%{public}zu]", cameraObjList.size()); + } } else { MEDIA_ERR_LOG("GetSupportedCameras call Failed!"); } @@ -995,18 +1006,26 @@ napi_value CameraManagerNapi::GetSupportedOutputCapability(napi_env env, napi_ca napi_get_value_int32(env, argv[PARAM1], &sceneMode); MEDIA_INFO_LOG("CameraManagerNapi::GetSupportedOutputCapability mode = %{public}d", sceneMode); switch (sceneMode) { - case JS_CAPTURE: + case SceneMode::CAPTURE: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::CAPTURE); break; - case JS_VIDEO: + case SceneMode::VIDEO: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::VIDEO); break; - case JS_PORTRAIT: + case SceneMode::PORTRAIT: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::PORTRAIT); break; - case JS_NIGHT: + case SceneMode::NIGHT: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::NIGHT); break; + case SceneMode::PROFESSIONAL_PHOTO: + result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, + SceneMode::PROFESSIONAL_PHOTO); + break; + case SceneMode::PROFESSIONAL_VIDEO: + result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, + SceneMode::PROFESSIONAL_VIDEO); + break; default: MEDIA_ERR_LOG("CreateCameraSessionInstance mode = %{public}d not supported", sceneMode); break; diff --git a/frameworks/js/camera_napi/src/mode/mode_manager_napi.cpp 
b/frameworks/js/camera_napi/src/mode/mode_manager_napi.cpp index eee1f166d..fdada8d64 100644 --- a/frameworks/js/camera_napi/src/mode/mode_manager_napi.cpp +++ b/frameworks/js/camera_napi/src/mode/mode_manager_napi.cpp @@ -128,13 +128,6 @@ napi_value ModeManagerNapi::CreateModeManager(napi_env env) return result; } -enum JsSceneMode { - JS_CAPTURE = 1, - JS_VIDEO = 2, - JS_PORTRAIT = 3, - JS_NIGHT = 4, -}; - napi_value ModeManagerNapi::CreateCameraSessionInstance(napi_env env, napi_callback_info info) { MEDIA_INFO_LOG("CreateCameraSessionInstance is called"); diff --git a/frameworks/js/camera_napi/src/mode/portrait_session_napi.cpp b/frameworks/js/camera_napi/src/mode/portrait_session_napi.cpp index ffdfe0da8..c8978d7b9 100644 --- a/frameworks/js/camera_napi/src/mode/portrait_session_napi.cpp +++ b/frameworks/js/camera_napi/src/mode/portrait_session_napi.cpp @@ -437,7 +437,7 @@ napi_value PortraitSessionNapi::SetPhysicalAperture(napi_env env, napi_callback_ napi_get_value_double(env, argv[PARAM0], &physicalAperture); portraitSessionNapi->portraitSession_->LockForControl(); portraitSessionNapi->portraitSession_->SetPhysicalAperture((float)physicalAperture); - MEDIA_INFO_LOG("SetPhysicalAperture set physicalAperture %{public}f!", physicalAperture); + MEDIA_INFO_LOG("SetPhysicalAperture set physicalAperture %{public}f!", ConfusingNumber(physicalAperture)); portraitSessionNapi->portraitSession_->UnlockForControl(); } else { MEDIA_ERR_LOG("SetPhysicalAperture call Failed!"); diff --git a/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp b/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp new file mode 100644 index 000000000..9f21d56a4 --- /dev/null +++ b/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp @@ -0,0 +1,1549 @@ +/* + * Copyright (c) 2021-2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include "js_native_api.h" +#include "mode/profession_session_napi.h" +#include "camera_napi_security_utils.h" + +namespace OHOS { +namespace CameraStandard { +using namespace std; + +thread_local napi_ref ProfessionSessionNapi::sConstructor_ = nullptr; + +ProfessionSessionNapi::ProfessionSessionNapi() : env_(nullptr), wrapper_(nullptr) +{ +} +ProfessionSessionNapi::~ProfessionSessionNapi() +{ + MEDIA_DEBUG_LOG("~ProfessionSessionNapi is called"); + if (wrapper_ != nullptr) { + napi_delete_reference(env_, wrapper_); + } + if (professionSession_) { + professionSession_ = nullptr; + } + exposureInfoCallback_ = nullptr; + isoInfoCallback_ = nullptr; + apertureInfoCallback_ = nullptr; + luminationInfoCallback_ = nullptr; +} + +void ProfessionSessionNapi::ProfessionSessionNapiDestructor(napi_env env, void* nativeObject, void* finalize_hint) +{ + MEDIA_DEBUG_LOG("ProfessionSessionNapiDestructor is called"); + ProfessionSessionNapi* cameraObj = reinterpret_cast(nativeObject); + if (cameraObj != nullptr) { + delete cameraObj; + } +} +napi_value ProfessionSessionNapi::Init(napi_env env, napi_value exports) +{ + MEDIA_DEBUG_LOG("Init is called"); + napi_status status; + napi_value ctorObj; + std::vector manual_exposure_props = CameraSessionNapi::auto_exposure_props; + std::vector manual_exposure_funcs = { + DECLARE_NAPI_FUNCTION("getSupportedMeteringModes", GetSupportedMeteringModes), + DECLARE_NAPI_FUNCTION("isExposureMeteringModeSupported", IsMeteringModeSupported), + DECLARE_NAPI_FUNCTION("getExposureMeteringMode", 
GetMeteringMode), + DECLARE_NAPI_FUNCTION("setExposureMeteringMode", SetMeteringMode), + + DECLARE_NAPI_FUNCTION("getExposureDurationRange", GetExposureDurationRange), + DECLARE_NAPI_FUNCTION("getExposureDuration", GetExposureDuration), + DECLARE_NAPI_FUNCTION("setExposureDuration", SetExposureDuration), + }; + manual_exposure_props.insert(manual_exposure_props.end(), + manual_exposure_funcs.begin(), manual_exposure_funcs.end()); + + std::vector pro_manual_focus_props = CameraSessionNapi::manual_focus_props; + std::vector manual_focus_funcs = { + DECLARE_NAPI_FUNCTION("getSupportedFocusAssistFlashModes", GetSupportedFocusAssistFlashModes), + DECLARE_NAPI_FUNCTION("isFocusAssistSupported", IsFocusAssistFlashModeSupported), + DECLARE_NAPI_FUNCTION("getFocusAssistFlashMode", GetFocusAssistFlashMode), + DECLARE_NAPI_FUNCTION("setFocusAssist", SetFocusAssistFlashMode), + }; + + pro_manual_focus_props.insert(pro_manual_focus_props.end(), + manual_focus_funcs.begin(), manual_focus_funcs.end()); + + std::vector manual_iso_props = { + DECLARE_NAPI_FUNCTION("getISORange", GetIsoRange), + DECLARE_NAPI_FUNCTION("isManualISOSupported", IsManualIsoSupported), + DECLARE_NAPI_FUNCTION("getISO", GetISO), + DECLARE_NAPI_FUNCTION("setISO", SetISO), + }; + + std::vector auto_wb_props = { + DECLARE_NAPI_FUNCTION("getSupportedWhiteBalanceModes", GetSupportedWhiteBalanceModes), + DECLARE_NAPI_FUNCTION("isWhiteBalanceModeSupported", IsWhiteBalanceModeSupported), + DECLARE_NAPI_FUNCTION("getWhiteBalanceMode", GetWhiteBalanceMode), + DECLARE_NAPI_FUNCTION("setWhiteBalanceMode", SetWhiteBalanceMode), + }; + + std::vector manual_wb_props = { + DECLARE_NAPI_FUNCTION("getWhiteBalanceRange", GetManualWhiteBalanceRange), + DECLARE_NAPI_FUNCTION("isManualWhiteBalanceSupported", IsManualWhiteBalanceSupported), + DECLARE_NAPI_FUNCTION("getWhiteBalance", GetManualWhiteBalance), + DECLARE_NAPI_FUNCTION("setWhiteBalance", SetManualWhiteBalance), + }; + + std::vector pro_session_props = { + 
DECLARE_NAPI_FUNCTION("getSupportedExposureHintModes", GetSupportedExposureHintModes), + DECLARE_NAPI_FUNCTION("getExposureHintMode", GetExposureHintMode), + DECLARE_NAPI_FUNCTION("setExposureHintMode", SetExposureHintMode), + + DECLARE_NAPI_FUNCTION("getSupportedPhysicalApertures", GetSupportedPhysicalApertures), + DECLARE_NAPI_FUNCTION("getPhysicalAperture", GetPhysicalAperture), + DECLARE_NAPI_FUNCTION("setPhysicalAperture", SetPhysicalAperture), + DECLARE_NAPI_FUNCTION("on", On), + DECLARE_NAPI_FUNCTION("once", Once), + DECLARE_NAPI_FUNCTION("off", Off), + }; + + std::vector> descriptors = { + CameraSessionNapi::camera_process_props, CameraSessionNapi::zoom_props, + CameraSessionNapi::color_effect_props, CameraSessionNapi::flash_props, + CameraSessionNapi::focus_props, manual_iso_props, auto_wb_props, manual_wb_props, + manual_exposure_props, pro_manual_focus_props, pro_session_props}; + + std::vector professional_session_props = + CameraNapiUtils::GetPropertyDescriptor(descriptors); + status = napi_define_class(env, PROFESSIONAL_SESSION_NAPI_CLASS_NAME, NAPI_AUTO_LENGTH, + ProfessionSessionNapiConstructor, nullptr, + professional_session_props.size(), + professional_session_props.data(), &ctorObj); + if (status == napi_ok) { + int32_t refCount = 1; + status = napi_create_reference(env, ctorObj, refCount, &sConstructor_); + if (status == napi_ok) { + status = napi_set_named_property(env, exports, PROFESSIONAL_SESSION_NAPI_CLASS_NAME, ctorObj); + if (status == napi_ok) { + return exports; + } + } + } + MEDIA_ERR_LOG("Init call Failed!"); + return nullptr; +} + +napi_value ProfessionSessionNapi::CreateCameraSession(napi_env env, SceneMode mode) +{ + MEDIA_DEBUG_LOG("CreateCameraSession is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + napi_value constructor; + status = napi_get_reference_value(env, sConstructor_, &constructor); + if (status == napi_ok) { + sCameraSession_ = 
CameraManager::GetInstance()->CreateCaptureSession(mode); + if (sCameraSession_ == nullptr) { + MEDIA_ERR_LOG("Failed to create Profession session instance"); + napi_get_undefined(env, &result); + return result; + } + status = napi_new_instance(env, constructor, 0, nullptr, &result); + sCameraSession_ = nullptr; + if (status == napi_ok && result != nullptr) { + MEDIA_DEBUG_LOG("success to create Profession session napi instance"); + return result; + } else { + MEDIA_ERR_LOG("Failed to create Profession session napi instance"); + } + } + MEDIA_ERR_LOG("Failed to create Profession session napi instance last"); + napi_get_undefined(env, &result); + return result; +} + +napi_value ProfessionSessionNapi::ProfessionSessionNapiConstructor(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("ProfessionSessionNapiConstructor is called"); + napi_status status; + napi_value result = nullptr; + napi_value thisVar = nullptr; + + napi_get_undefined(env, &result); + CAMERA_NAPI_GET_JS_OBJ_WITH_ZERO_ARGS(env, info, status, thisVar); + + if (status == napi_ok && thisVar != nullptr) { + std::unique_ptr obj = std::make_unique(); + obj->env_ = env; + if (sCameraSession_ == nullptr) { + MEDIA_ERR_LOG("sCameraSession_ is null"); + return result; + } + obj->professionSession_ = static_cast(sCameraSession_.GetRefPtr()); + obj->cameraSession_ = obj->professionSession_; + if (obj->professionSession_ == nullptr) { + MEDIA_ERR_LOG("professionSession_ is null"); + return result; + } + status = napi_wrap(env, thisVar, reinterpret_cast(obj.get()), + ProfessionSessionNapi::ProfessionSessionNapiDestructor, nullptr, nullptr); + if (status == napi_ok) { + obj.release(); + return thisVar; + } else { + MEDIA_ERR_LOG("ProfessionSessionNapi Failure wrapping js to native napi"); + } + } + MEDIA_ERR_LOG("ProfessionSessionNapi call Failed!"); + return result; +} +// MeteringMode +napi_value ProfessionSessionNapi::GetSupportedMeteringModes(napi_env env, napi_callback_info info) +{ + 
MEDIA_DEBUG_LOG("GetSupportedMeteringModes is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + status = napi_create_array(env, &result); + if (status != napi_ok) { + MEDIA_ERR_LOG("napi_create_array call Failed!"); + return result; + } + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + std::vector meteringModes; + int32_t retCode = professionSessionNapi->professionSession_->GetSupportedMeteringModes(meteringModes); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + MEDIA_INFO_LOG("ProfessionSessionNapi::GetSupportedMeteringModes len = %{public}zu", + meteringModes.size()); + if (!meteringModes.empty()) { + for (size_t i = 0; i < meteringModes.size(); i++) { + MeteringMode mode = meteringModes[i]; + napi_value value; + napi_create_int32(env, mode, &value); + napi_set_element(env, result, i, value); + } + } + } else { + MEDIA_ERR_LOG("GetSupportedMeteringModes call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::IsMeteringModeSupported(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("IsMeteringModeSupported is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr) { + int32_t value; + napi_get_value_int32(env, 
argv[PARAM0], &value); + MeteringMode mode = (MeteringMode)value; + bool isSupported; + int32_t retCode = professionSessionNapi->professionSession_->IsMeteringModeSupported(mode, isSupported); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_get_boolean(env, isSupported, &result); + } else { + MEDIA_ERR_LOG("IsMeteringModeSupported call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::GetMeteringMode(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetMeteringMode is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + MeteringMode mode; + int32_t retCode = professionSessionNapi->professionSession_->GetMeteringMode(mode); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_create_int32(env, mode, &result); + } else { + MEDIA_ERR_LOG("GetMeteringMode call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::SetMeteringMode(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("SetMeteringMode is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && 
professionSessionNapi->professionSession_ != nullptr) { + int32_t value; + napi_get_value_int32(env, argv[PARAM0], &value); + MeteringMode mode = static_cast(value); + professionSessionNapi->professionSession_->LockForControl(); + professionSessionNapi->professionSession_-> + SetMeteringMode(static_cast(mode)); + MEDIA_INFO_LOG("ProfessionSessionNapi SetMeteringMode set meteringMode %{public}d!", mode); + professionSessionNapi->professionSession_->UnlockForControl(); + } else { + MEDIA_ERR_LOG("SetMeteringMode call Failed!"); + } + return result; +} +// ExposureDuration +napi_value ProfessionSessionNapi::GetExposureDurationRange(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("getExposureDurationRange is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr) { + std::vector vecExposureList; + int32_t retCode = professionSessionNapi->professionSession_->GetSensorExposureTimeRange(vecExposureList); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + if (vecExposureList.empty() || napi_create_array(env, &result) != napi_ok) { + return result; + } + for (size_t i = 0; i < vecExposureList.size(); i++) { + uint32_t exposure = vecExposureList[i]; + MEDIA_DEBUG_LOG("EXPOSURE_RANGE : exposureDuration = %{public}d", vecExposureList[i]); + napi_value value; + napi_create_uint32(env, exposure, &value); + napi_set_element(env, result, i, value); + } + MEDIA_DEBUG_LOG("EXPOSURE_RANGE ExposureList size : %{public}zu", vecExposureList.size()); + } else { + MEDIA_ERR_LOG("getExposureDurationRange call Failed!"); + } + return result; +} + +napi_value 
ProfessionSessionNapi::GetExposureDuration(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetExposureDuration is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi!= nullptr) { + uint32_t exposureDurationValue; + int32_t retCode = professionSessionNapi->professionSession_->GetSensorExposureTime(exposureDurationValue); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + MEDIA_DEBUG_LOG("GetExposureDuration : exposureDuration = %{public}d", exposureDurationValue); + napi_create_uint32(env, exposureDurationValue, &result); + } else { + MEDIA_ERR_LOG("GetExposureDuration call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::SetExposureDuration(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("SetExposureDuration is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr) { + uint32_t exposureDurationValue; + napi_get_value_uint32(env, argv[PARAM0], &exposureDurationValue); + MEDIA_DEBUG_LOG("SetExposureDuration : exposureDuration = %{public}d", exposureDurationValue); + professionSessionNapi->professionSession_->LockForControl(); + int32_t retCode = 
professionSessionNapi->professionSession_->SetSensorExposureTime(exposureDurationValue); + professionSessionNapi->professionSession_->UnlockForControl(); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return result; + } + } else { + MEDIA_ERR_LOG("SetExposureDuration call Failed!"); + } + return result; +} + +// FocusAssistFlashMode +napi_value ProfessionSessionNapi::GetSupportedFocusAssistFlashModes(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetSupportedFocusAssistFlashModes is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + status = napi_create_array(env, &result); + if (status != napi_ok) { + MEDIA_ERR_LOG("napi_create_array call Failed!"); + return result; + } + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + std::vector focusAssistFlashs; + int32_t retCode = + professionSessionNapi->professionSession_->GetSupportedFocusAssistFlashModes(focusAssistFlashs); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + MEDIA_INFO_LOG("ProfessionSessionNapi::GetSupportedFocusAssistFlashModes len = %{public}zu", + focusAssistFlashs.size()); + if (!focusAssistFlashs.empty()) { + for (size_t i = 0; i < focusAssistFlashs.size(); i++) { + FocusAssistFlashMode mode = focusAssistFlashs[i]; + napi_value value; + napi_create_int32(env, mode, &value); + napi_set_element(env, result, i, value); + } + } + } else { + MEDIA_ERR_LOG("GetSupportedFocusAssistFlashModes call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::IsFocusAssistFlashModeSupported(napi_env env, napi_callback_info info) +{ + 
MEDIA_DEBUG_LOG("IsFocusAssistFlashModeSupported is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr) { + int32_t value; + napi_get_value_int32(env, argv[PARAM0], &value); + FocusAssistFlashMode mode = static_cast(value); + bool isSupported; + int32_t retCode = professionSessionNapi->professionSession_->IsFocusAssistFlashModeSupported(mode, isSupported); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_get_boolean(env, isSupported, &result); + } else { + MEDIA_ERR_LOG("IsFocusAssistFlashModeSupported call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::GetFocusAssistFlashMode(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetFocusAssistFlashMode is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + FocusAssistFlashMode mode; + int32_t retCode = professionSessionNapi->professionSession_->GetFocusAssistFlashMode(mode); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_create_int32(env, mode, &result); + } else { + MEDIA_ERR_LOG("GetFocusAssistFlashMode call Failed!"); + } + return result; +} + +napi_value 
ProfessionSessionNapi::SetFocusAssistFlashMode(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("SetFocusAssistFlashMode is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + bool value; + napi_get_value_bool(env, argv[PARAM0], &value); + FocusAssistFlashMode mode = static_cast(value); + professionSessionNapi->professionSession_->LockForControl(); + professionSessionNapi->professionSession_-> + SetFocusAssistFlashMode(static_cast(mode)); + MEDIA_INFO_LOG("ProfessionSessionNapi SetFocusAssistFlashMode set focusAssistFlash %{public}d!", mode); + professionSessionNapi->professionSession_->UnlockForControl(); + } else { + MEDIA_ERR_LOG("SetFocusAssistFlashMode call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::GetIsoRange(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetIsoRange is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr) { + std::vector vecIsoList; + int32_t retCode = professionSessionNapi->professionSession_->GetIsoRange(vecIsoList); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + 
MEDIA_INFO_LOG("ProfessionSessionNapi::GetIsoRange len = %{public}zu", vecIsoList.size()); + + if (!vecIsoList.empty() && napi_create_array(env, &result) == napi_ok) { + for (size_t i = 0; i < vecIsoList.size(); i++) { + int32_t iso = vecIsoList[i]; + napi_value value; + napi_create_int32(env, iso, &value); + napi_set_element(env, result, i, value); + } + } else { + MEDIA_ERR_LOG("vecIsoList is empty or failed to create array!"); + } + } else { + MEDIA_ERR_LOG("GetIsoRange call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::IsManualIsoSupported(napi_env env, napi_callback_info info) +{ + if (!CameraNapiSecurity::CheckSystemApp(env)) { + MEDIA_ERR_LOG("SystemApi IsManualIsoSupported is called!"); + return nullptr; + } + MEDIA_DEBUG_LOG("IsManualIsoSupported is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + bool isSupported = professionSessionNapi->professionSession_->IsManualIsoSupported(); + napi_get_boolean(env, isSupported, &result); + } else { + MEDIA_ERR_LOG("IsManualIsoSupported call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::GetISO(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetISO is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, 
reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + int32_t iso; + int32_t retCode = professionSessionNapi->professionSession_->GetISO(iso); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_create_int32(env, iso, &result); + } else { + MEDIA_ERR_LOG("GetISO call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::SetISO(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("SetISO is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + int32_t iso; + napi_get_value_int32(env, argv[PARAM0], &iso); + professionSessionNapi->professionSession_->LockForControl(); + professionSessionNapi->professionSession_->SetISO(iso); + MEDIA_INFO_LOG("ProfessionSessionNapi::SetISO set iso:%{public}d", iso); + professionSessionNapi->professionSession_->UnlockForControl(); + } else { + MEDIA_ERR_LOG("SetISO call Failed!"); + } + return result; +} + +// ------------------------------------------------auto_awb_props------------------------------------------------------- +napi_value ProfessionSessionNapi::GetSupportedWhiteBalanceModes(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetSupportedWhiteBalanceModes is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + 
napi_get_undefined(env, &result); + status = napi_create_array(env, &result); + if (status != napi_ok) { + MEDIA_ERR_LOG("napi_create_array call Failed!"); + return result; + } + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + std::vector whiteBalanceModes; + int32_t retCode = professionSessionNapi->professionSession_->GetSupportedWhiteBalanceModes(whiteBalanceModes); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + + MEDIA_INFO_LOG("ProfessionSessionNapi::GetSupportedWhiteBalanceModes len = %{public}zu", + whiteBalanceModes.size()); + if (!whiteBalanceModes.empty()) { + for (size_t i = 0; i < whiteBalanceModes.size(); i++) { + WhiteBalanceMode whiteBalanceMode = whiteBalanceModes[i]; + napi_value value; + napi_create_int32(env, whiteBalanceMode, &value); + napi_set_element(env, result, i, value); + } + } + } else { + MEDIA_ERR_LOG("GetSupportedWhiteBalanceModes call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::IsWhiteBalanceModeSupported(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("IsWhiteBalanceModeSupported is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + int32_t value; + napi_get_value_int32(env, argv[PARAM0], &value); + WhiteBalanceMode mode = (WhiteBalanceMode)value; + bool isSupported; + int32_t retCode = 
professionSessionNapi->professionSession_->IsWhiteBalanceModeSupported(mode, isSupported); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_get_boolean(env, isSupported, &result); + } else { + MEDIA_ERR_LOG("IsWhiteBalanceModeSupported call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::GetWhiteBalanceMode(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetWhiteBalanceMode is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + WhiteBalanceMode whiteBalanceMode; + int32_t retCode = professionSessionNapi->professionSession_->GetWhiteBalanceMode(whiteBalanceMode); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_create_int32(env, whiteBalanceMode, &result); + } else { + MEDIA_ERR_LOG("GetWhiteBalanceMode call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::SetWhiteBalanceMode(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("SetWhiteBalanceMode is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + int32_t 
value; + napi_get_value_int32(env, argv[PARAM0], &value); + WhiteBalanceMode mode = (WhiteBalanceMode)value; + professionSessionNapi->professionSession_->LockForControl(); + professionSessionNapi->professionSession_->SetWhiteBalanceMode(mode); + MEDIA_INFO_LOG("ProfessionSessionNapi::SetWhiteBalanceMode set mode:%{public}d", value); + professionSessionNapi->professionSession_->UnlockForControl(); + } else { + MEDIA_ERR_LOG("SetWhiteBalanceMode call Failed!"); + } + return result; +} + +// -----------------------------------------------manual_awb_props------------------------------------------------------ +napi_value ProfessionSessionNapi::GetManualWhiteBalanceRange(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetManualWhiteBalanceRange is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr) { + std::vector whiteBalanceRange = {}; + int32_t retCode = professionSessionNapi->professionSession_->GetManualWhiteBalanceRange(whiteBalanceRange); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + MEDIA_INFO_LOG("ProfessionSessionNapi::GetManualWhiteBalanceRange len = %{public}zu", whiteBalanceRange.size()); + + if (!whiteBalanceRange.empty() && napi_create_array(env, &result) == napi_ok) { + for (size_t i = 0; i < whiteBalanceRange.size(); i++) { + int32_t iso = whiteBalanceRange[i]; + napi_value value; + napi_create_int32(env, iso, &value); + napi_set_element(env, result, i, value); + } + } else { + MEDIA_ERR_LOG("whiteBalanceRange is empty or failed to create array!"); + } + } else { + MEDIA_ERR_LOG("GetManualWhiteBalanceRange call 
Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::IsManualWhiteBalanceSupported(napi_env env, napi_callback_info info) +{ + if (!CameraNapiSecurity::CheckSystemApp(env)) { + MEDIA_ERR_LOG("SystemApi IsManualWhiteBalanceSupported is called!"); + return nullptr; + } + MEDIA_DEBUG_LOG("IsManualWhiteBalanceSupported is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + bool isSupported; + int32_t retCode = professionSessionNapi->professionSession_->IsManualWhiteBalanceSupported(isSupported); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_get_boolean(env, isSupported, &result); + } else { + MEDIA_ERR_LOG("IsManualWhiteBalanceSupported call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::GetManualWhiteBalance(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetManualWhiteBalance is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + int32_t wbValue; + int32_t retCode = professionSessionNapi->professionSession_->GetManualWhiteBalance(wbValue); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + 
napi_create_int32(env, wbValue, &result); + } else { + MEDIA_ERR_LOG("GetManualWhiteBalance call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::SetManualWhiteBalance(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("SetManualWhiteBalance is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + int32_t wbValue; + napi_get_value_int32(env, argv[PARAM0], &wbValue); + professionSessionNapi->professionSession_->LockForControl(); + professionSessionNapi->professionSession_->SetManualWhiteBalance(wbValue); + MEDIA_INFO_LOG("ProfessionSessionNapi::SetManualWhiteBalance set wbValue:%{public}d", wbValue); + professionSessionNapi->professionSession_->UnlockForControl(); + } else { + MEDIA_ERR_LOG("SetManualWhiteBalance call Failed!"); + } + return result; +} + +// ExposureHintMode +napi_value ProfessionSessionNapi::GetSupportedExposureHintModes(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetSupportedExposureHintModes is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + status = napi_create_array(env, &result); + if (status != napi_ok) { + MEDIA_ERR_LOG("napi_create_array call Failed!"); + return result; + } + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && 
professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + std::vector exposureHints; + int32_t retCode = + professionSessionNapi->professionSession_->GetSupportedExposureHintModes(exposureHints); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + MEDIA_INFO_LOG("ProfessionSessionNapi::GetSupportedExposureHintModes len = %{public}zu", + exposureHints.size()); + if (!exposureHints.empty()) { + for (size_t i = 0; i < exposureHints.size(); i++) { + ExposureHintMode mode = exposureHints[i]; + napi_value value; + napi_create_int32(env, mode, &value); + napi_set_element(env, result, i, value); + } + } + } else { + MEDIA_ERR_LOG("GetSupportedExposureHintModes call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::GetExposureHintMode(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetExposureHintMode is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + ExposureHintMode mode = EXPOSURE_HINT_UNSUPPORTED; + int32_t retCode = professionSessionNapi->professionSession_->GetExposureHintMode(mode); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_create_int32(env, mode, &result); + } else { + MEDIA_ERR_LOG("GetExposureHintMode call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::SetExposureHintMode(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("SetExposureHintMode is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + size_t argc = 
ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + int32_t value; + napi_get_value_int32(env, argv[PARAM0], &value); + ExposureHintMode mode = static_cast(value); + professionSessionNapi->professionSession_->LockForControl(); + professionSessionNapi->professionSession_-> + SetExposureHintMode(static_cast(mode)); + MEDIA_INFO_LOG("ProfessionSessionNapi SetExposureHintMode set exposureHint %{public}d!", mode); + professionSessionNapi->professionSession_->UnlockForControl(); + } else { + MEDIA_ERR_LOG("SetExposureHintMode call Failed!"); + } + return result; +} + +//Aperture +napi_value ProfessionSessionNapi::ProcessingPhysicalApertures(napi_env env, + std::vector> physicalApertures) +{ + napi_value result = nullptr; + napi_create_array(env, &result); + size_t zoomRangeSize = 2; + for (size_t i = 0; i < physicalApertures.size(); i++) { + if (physicalApertures[i].size() <= zoomRangeSize) { + continue; + } + napi_value zoomRange; + napi_create_array(env, &zoomRange); + napi_value physicalApertureRange; + napi_create_array(env, &physicalApertureRange); + for (size_t y = 0; y < physicalApertures[i].size(); y++) { + if (y < zoomRangeSize) { + napi_value value; + napi_create_double(env, CameraNapiUtils::FloatToDouble(physicalApertures[i][y]), &value); + napi_set_element(env, zoomRange, y, value); + continue; + } + napi_value value; + napi_create_double(env, CameraNapiUtils::FloatToDouble(physicalApertures[i][y]), &value); + napi_set_element(env, physicalApertureRange, y - zoomRangeSize, value); + } + napi_value obj; + napi_create_object(env, &obj); + napi_set_named_property(env, obj, "zoomRange", 
zoomRange); + napi_set_named_property(env, obj, "apertures", physicalApertureRange); + napi_set_element(env, result, i, obj); + } + return result; +} + +napi_value ProfessionSessionNapi::GetSupportedPhysicalApertures(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetSupportedPhysicalApertures is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + status = napi_create_array(env, &result); + if (status != napi_ok) { + MEDIA_ERR_LOG("napi_create_array call Failed!"); + return result; + } + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + std::vector> physicalApertures = {}; + int32_t retCode = professionSessionNapi->professionSession_->GetSupportedPhysicalApertures(physicalApertures); + MEDIA_INFO_LOG("GetSupportedPhysicalApertures len = %{public}zu", physicalApertures.size()); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + if (!physicalApertures.empty()) { + result = ProcessingPhysicalApertures(env, physicalApertures); + } + } else { + MEDIA_ERR_LOG("GetSupportedPhysicalApertures call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::GetPhysicalAperture(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetPhysicalAperture is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, 
reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + float physicalAperture = 0.0; + int32_t retCode = professionSessionNapi->professionSession_->GetPhysicalAperture(physicalAperture); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_create_double(env, CameraNapiUtils::FloatToDouble(physicalAperture), &result); + } else { + MEDIA_ERR_LOG("GetPhysicalAperture call Failed!"); + } + return result; +} + +napi_value ProfessionSessionNapi::SetPhysicalAperture(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("SetPhysicalAperture is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + ProfessionSessionNapi* professionSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&professionSessionNapi)); + if (status == napi_ok && professionSessionNapi != nullptr && professionSessionNapi->professionSession_ != nullptr) { + double physicalAperture; + napi_get_value_double(env, argv[PARAM0], &physicalAperture); + professionSessionNapi->professionSession_->LockForControl(); + int32_t retCode = professionSessionNapi->professionSession_->SetPhysicalAperture((float)physicalAperture); + MEDIA_INFO_LOG("SetPhysicalAperture set physicalAperture %{public}f!", ConfusingNumber(physicalAperture)); + professionSessionNapi->professionSession_->UnlockForControl(); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + } else { + MEDIA_ERR_LOG("SetPhysicalAperture call Failed!"); + } + return result; +} + +void ProfessionSessionNapi::RegisterAbilityChangeCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + if (abilityCallback_ == nullptr) 
{ + abilityCallback_ = std::make_shared(env); + professionSession_->SetAbilityCallback(abilityCallback_); + } + abilityCallback_->SaveCallbackReference(callback, isOnce); +} + +void ProfessionSessionNapi::UnregisterAbilityChangeCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + if (abilityCallback_ == nullptr) { + MEDIA_ERR_LOG("abilityCallback is null"); + } else { + abilityCallback_->RemoveCallbackRef(env, callback); + } +} + +void ProfessionSessionNapi::RegisterExposureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + if (exposureInfoCallback_ == nullptr) { + exposureInfoCallback_ = std::make_shared(env); + professionSession_->SetExposureInfoCallback(exposureInfoCallback_); + } + exposureInfoCallback_->SaveCallbackReference(callback, isOnce); +} + +void ProfessionSessionNapi::UnregisterExposureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + if (exposureInfoCallback_ == nullptr) { + MEDIA_ERR_LOG("abilityCallback is null"); + } else { + exposureInfoCallback_->RemoveCallbackRef(env, callback); + } +} + +void ProfessionSessionNapi::RegisterIsoInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + if (isoInfoCallback_ == nullptr) { + isoInfoCallback_ = std::make_shared(env); + professionSession_->SetIsoInfoCallback(isoInfoCallback_); + } + isoInfoCallback_->SaveCallbackReference(callback, isOnce); +} + +void ProfessionSessionNapi::UnregisterIsoInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + if (isoInfoCallback_ == nullptr) { + MEDIA_ERR_LOG("abilityCallback is null"); + } else { + isoInfoCallback_->RemoveCallbackRef(env, callback); + } +} + +void ProfessionSessionNapi::RegisterApertureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + if (apertureInfoCallback_ == nullptr) { + apertureInfoCallback_ = 
std::make_shared(env); + professionSession_->SetApertureInfoCallback(apertureInfoCallback_); + } + apertureInfoCallback_->SaveCallbackReference(callback, isOnce); +} + +void ProfessionSessionNapi::UnregisterApertureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + if (apertureInfoCallback_ == nullptr) { + MEDIA_ERR_LOG("apertureInfoCallback is null"); + } else { + apertureInfoCallback_->RemoveCallbackRef(env, callback); + } +} + +void ProfessionSessionNapi::RegisterLuminationInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + if (luminationInfoCallback_ == nullptr) { + ExposureHintMode mode = EXPOSURE_HINT_MODE_ON; + professionSession_->LockForControl(); + professionSession_->SetExposureHintMode(mode); + professionSession_->UnlockForControl(); + MEDIA_INFO_LOG("ProfessionSessionNapi SetExposureHintMode set exposureHint %{public}d!", mode); + luminationInfoCallback_ = std::make_shared(env); + professionSession_->SetLuminationInfoCallback(luminationInfoCallback_); + } + luminationInfoCallback_->SaveCallbackReference(callback, isOnce); +} + +void ProfessionSessionNapi::UnregisterLuminationInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + if (luminationInfoCallback_ == nullptr) { + MEDIA_ERR_LOG("abilityCallback is null"); + } else { + ExposureHintMode mode = EXPOSURE_HINT_MODE_OFF; + professionSession_->LockForControl(); + professionSession_->SetExposureHintMode(mode); + professionSession_->UnlockForControl(); + MEDIA_INFO_LOG("ProfessionSessionNapi SetExposureHintMode set exposureHint %{public}d!", mode); + luminationInfoCallback_->RemoveCallbackRef(env, callback); + } +} + +void ExposureInfoCallbackListener::OnExposureInfoChangedCallbackAsync(ExposureInfo info) const +{ + MEDIA_DEBUG_LOG("OnExposureInfoChangedCallbackAsync is called"); + uv_loop_s* loop = nullptr; + napi_get_uv_event_loop(env_, &loop); + if (!loop) { + MEDIA_ERR_LOG("failed 
to get event loop"); + return; + } + uv_work_t* work = new(std::nothrow) uv_work_t; + if (!work) { + MEDIA_ERR_LOG("failed to allocate work"); + return; + } + std::unique_ptr callback = std::make_unique(info, this); + work->data = callback.get(); + int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) { + ExposureInfoChangedCallback* callback = reinterpret_cast(work->data); + if (callback) { + callback->listener_->OnExposureInfoChangedCallback(callback->info_); + delete callback; + } + delete work; + }, uv_qos_user_initiated); + if (ret) { + MEDIA_ERR_LOG("failed to execute work"); + delete work; + } else { + callback.release(); + } +} + +void ExposureInfoCallbackListener::OnExposureInfoChangedCallback(ExposureInfo info) const +{ + MEDIA_DEBUG_LOG("OnExposureInfoChangedCallback is called"); + napi_value result[ARGS_TWO] = {nullptr, nullptr}; + napi_value callback = nullptr; + napi_value retVal; + for (auto it = baseCbList_.begin(); it != baseCbList_.end();) { + napi_env env = (*it)->env_; + napi_get_undefined(env, &result[PARAM0]); + napi_create_object(env, &result[PARAM1]); + napi_value value; + napi_create_uint32(env, info.exposureDurationValue, &value); + napi_set_named_property(env, result[PARAM1], "exposureTimeValue", value); + napi_get_reference_value(env, (*it)->cb_, &callback); + napi_call_function(env_, nullptr, callback, ARGS_TWO, result, &retVal); + if ((*it)->isOnce_) { + napi_status status = napi_delete_reference(env, (*it)->cb_); + CHECK_AND_RETURN_LOG(status == napi_ok, "Remove once cb ref: delete reference for callback fail"); + (*it)->cb_ = nullptr; + baseCbList_.erase(it); + } else { + it++; + } + } +} + +void ExposureInfoCallbackListener::OnExposureInfoChanged(ExposureInfo info) +{ + MEDIA_DEBUG_LOG("OnExposureInfoChanged is called, info: %{public}d", info.exposureDurationValue); + OnExposureInfoChangedCallbackAsync(info); +} + +void IsoInfoCallbackListener::OnIsoInfoChangedCallbackAsync(IsoInfo 
info) const +{ + MEDIA_DEBUG_LOG("OnIsoInfoChangedCallbackAsync is called"); + uv_loop_s* loop = nullptr; + napi_get_uv_event_loop(env_, &loop); + if (!loop) { + MEDIA_ERR_LOG("failed to get event loop"); + return; + } + uv_work_t* work = new(std::nothrow) uv_work_t; + if (!work) { + MEDIA_ERR_LOG("failed to allocate work"); + return; + } + std::unique_ptr callback = std::make_unique(info, this); + work->data = callback.get(); + int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) { + IsoInfoChangedCallback* callback = reinterpret_cast(work->data); + if (callback) { + callback->listener_->OnIsoInfoChangedCallback(callback->info_); + delete callback; + } + delete work; + }, uv_qos_user_initiated); + if (ret) { + MEDIA_ERR_LOG("failed to execute work"); + delete work; + } else { + callback.release(); + } +} + +void IsoInfoCallbackListener::OnIsoInfoChangedCallback(IsoInfo info) const +{ + MEDIA_DEBUG_LOG("OnIsoInfoChangedCallback is called"); + napi_value result[ARGS_TWO] = {nullptr, nullptr}; + napi_value callback = nullptr; + napi_value retVal; + for (auto it = baseCbList_.begin(); it != baseCbList_.end();) { + napi_env env = (*it)->env_; + napi_get_undefined(env, &result[PARAM0]); + napi_create_object(env, &result[PARAM1]); + napi_value value; + napi_create_int32(env, CameraNapiUtils::FloatToDouble(info.isoValue), &value); + napi_set_named_property(env, result[PARAM1], "iso", value); + napi_get_reference_value(env, (*it)->cb_, &callback); + napi_call_function(env_, nullptr, callback, ARGS_TWO, result, &retVal); + if ((*it)->isOnce_) { + napi_status status = napi_delete_reference(env, (*it)->cb_); + CHECK_AND_RETURN_LOG(status == napi_ok, "Remove once cb ref: delete reference for callback fail"); + (*it)->cb_ = nullptr; + baseCbList_.erase(it); + } else { + it++; + } + } +} + +void IsoInfoCallbackListener::OnIsoInfoChanged(IsoInfo info) +{ + MEDIA_DEBUG_LOG("OnIsoInfoChanged is called, info: %{public}d", 
info.isoValue); + OnIsoInfoChangedCallbackAsync(info); +} + +void ApertureInfoCallbackListener::OnApertureInfoChangedCallbackAsync(ApertureInfo info) const +{ + MEDIA_DEBUG_LOG("OnApertureInfoChangedCallbackAsync is called"); + uv_loop_s* loop = nullptr; + napi_get_uv_event_loop(env_, &loop); + if (!loop) { + MEDIA_ERR_LOG("failed to get event loop"); + return; + } + uv_work_t* work = new(std::nothrow) uv_work_t; + if (!work) { + MEDIA_ERR_LOG("failed to allocate work"); + return; + } + std::unique_ptr callback = std::make_unique(info, this); + work->data = callback.get(); + int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) { + ApertureInfoChangedCallback* callback = reinterpret_cast(work->data); + if (callback) { + callback->listener_->OnApertureInfoChangedCallback(callback->info_); + delete callback; + } + delete work; + }, uv_qos_user_initiated); + if (ret) { + MEDIA_ERR_LOG("failed to execute work"); + delete work; + } else { + callback.release(); + } +} + +void ApertureInfoCallbackListener::OnApertureInfoChangedCallback(ApertureInfo info) const +{ + MEDIA_DEBUG_LOG("OnApertureInfoChangedCallback is called"); + napi_value result[ARGS_TWO] = {nullptr, nullptr}; + napi_value callback = nullptr; + napi_value retVal; + for (auto it = baseCbList_.begin(); it != baseCbList_.end();) { + napi_env env = (*it)->env_; + napi_get_undefined(env, &result[PARAM0]); + napi_create_object(env, &result[PARAM1]); + napi_value value; + napi_create_double(env, info.apertureValue, &value); + napi_set_named_property(env, result[PARAM1], "aperture", value); + napi_get_reference_value(env, (*it)->cb_, &callback); + napi_call_function(env_, nullptr, callback, ARGS_TWO, result, &retVal); + if ((*it)->isOnce_) { + napi_status status = napi_delete_reference(env, (*it)->cb_); + CHECK_AND_RETURN_LOG(status == napi_ok, "Remove once cb ref: delete reference for callback fail"); + (*it)->cb_ = nullptr; + baseCbList_.erase(it); + } else { + 
it++; + } + } +} + +void ApertureInfoCallbackListener::OnApertureInfoChanged(ApertureInfo info) +{ + MEDIA_DEBUG_LOG("OnApertureInfoChanged is called, apertureValue: %{public}f", info.apertureValue); + OnApertureInfoChangedCallbackAsync(info); +} + +void LuminationInfoCallbackListener::OnLuminationInfoChangedCallbackAsync(LuminationInfo info) const +{ + MEDIA_DEBUG_LOG("OnLuminationInfoChangedCallbackAsync is called"); + uv_loop_s* loop = nullptr; + napi_get_uv_event_loop(env_, &loop); + if (!loop) { + MEDIA_ERR_LOG("failed to get event loop"); + return; + } + uv_work_t* work = new(std::nothrow) uv_work_t; + if (!work) { + MEDIA_ERR_LOG("failed to allocate work"); + return; + } + std::unique_ptr callback = + std::make_unique(info, this); + work->data = callback.get(); + int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) { + LuminationInfoChangedCallback* callback = reinterpret_cast(work->data); + if (callback) { + callback->listener_->OnLuminationInfoChangedCallback(callback->info_); + delete callback; + } + delete work; + }, uv_qos_user_initiated); + if (ret) { + MEDIA_ERR_LOG("failed to execute work"); + delete work; + } else { + callback.release(); + } +} + +void LuminationInfoCallbackListener::OnLuminationInfoChangedCallback(LuminationInfo info) const +{ + MEDIA_DEBUG_LOG("OnLuminationInfoChangedCallback is called"); + napi_value result[ARGS_TWO] = {nullptr, nullptr}; + napi_value callback = nullptr; + napi_value retVal; + for (auto it = baseCbList_.begin(); it != baseCbList_.end();) { + napi_env env = (*it)->env_; + napi_get_undefined(env, &result[PARAM0]); + napi_create_object(env, &result[PARAM1]); + napi_value isoValue; + napi_create_double(env, info.luminationValue, &isoValue); + napi_set_named_property(env, result[PARAM1], "lumination", isoValue); + napi_get_reference_value(env, (*it)->cb_, &callback); + napi_call_function(env_, nullptr, callback, ARGS_TWO, result, &retVal); + if ((*it)->isOnce_) { + 
napi_status status = napi_delete_reference(env, (*it)->cb_); + CHECK_AND_RETURN_LOG(status == napi_ok, "Remove once cb ref: delete reference for callback fail"); + (*it)->cb_ = nullptr; + baseCbList_.erase(it); + } else { + it++; + } + } +} + +void LuminationInfoCallbackListener::OnLuminationInfoChanged(LuminationInfo info) +{ + MEDIA_DEBUG_LOG("OnLuminationInfoChanged is called, luminationValue: %{public}f", info.luminationValue); + OnLuminationInfoChangedCallbackAsync(info); +} + +napi_value ProfessionSessionNapi::On(napi_env env, napi_callback_info info) +{ + return ListenerTemplate::On(env, info); +} + +napi_value ProfessionSessionNapi::Once(napi_env env, napi_callback_info info) +{ + return ListenerTemplate::Once(env, info); +} + +napi_value ProfessionSessionNapi::Off(napi_env env, napi_callback_info info) +{ + return ListenerTemplate::Off(env, info); +} +} // namespace CameraStandard +} // namespace OHOS \ No newline at end of file diff --git a/frameworks/js/camera_napi/src/native_module_ohos_camera.cpp b/frameworks/js/camera_napi/src/native_module_ohos_camera.cpp index d5d47cb90..1b10b7f1c 100644 --- a/frameworks/js/camera_napi/src/native_module_ohos_camera.cpp +++ b/frameworks/js/camera_napi/src/native_module_ohos_camera.cpp @@ -39,6 +39,7 @@ static napi_value Export(napi_env env, napi_value exports) MetadataOutputNapi::Init(env, exports); MetadataObjectNapi::Init(env, exports); PortraitSessionNapi::Init(env, exports); + ProfessionSessionNapi::Init(env, exports); NightSessionNapi::Init(env, exports); PhotoSessionNapi::Init(env, exports); PhotoSessionForSysNapi::Init(env, exports); diff --git a/frameworks/js/camera_napi/src/output/photo_napi.cpp b/frameworks/js/camera_napi/src/output/photo_napi.cpp index e1b31ec87..350facbae 100644 --- a/frameworks/js/camera_napi/src/output/photo_napi.cpp +++ b/frameworks/js/camera_napi/src/output/photo_napi.cpp @@ -22,9 +22,10 @@ namespace OHOS { namespace CameraStandard { thread_local napi_ref PhotoNapi::sConstructor_ = 
nullptr; thread_local napi_value PhotoNapi::sMainImage_ = nullptr; +thread_local napi_value PhotoNapi::sRawImage_ = nullptr; thread_local uint32_t PhotoNapi::photoTaskId = PHOTO_TASKID; -PhotoNapi::PhotoNapi() : env_(nullptr), wrapper_(nullptr), mainImage_(nullptr) +PhotoNapi::PhotoNapi() : env_(nullptr), wrapper_(nullptr), mainImage_(nullptr), rawImage_(nullptr) { } @@ -37,6 +38,9 @@ PhotoNapi::~PhotoNapi() if (mainImage_) { mainImage_ = nullptr; } + if (rawImage_) { + rawImage_ = nullptr; + } } // Constructor callback @@ -54,6 +58,7 @@ napi_value PhotoNapi::PhotoNapiConstructor(napi_env env, napi_callback_info info std::unique_ptr obj = std::make_unique(); obj->env_ = env; obj->mainImage_ = sMainImage_; + obj->rawImage_ = sRawImage_; status = napi_wrap(env, thisVar, reinterpret_cast(obj.get()), PhotoNapi::PhotoNapiDestructor, nullptr, nullptr); if (status == napi_ok) { @@ -86,6 +91,7 @@ napi_value PhotoNapi::Init(napi_env env, napi_value exports) napi_property_descriptor photo_properties[] = { // Photo DECLARE_NAPI_GETTER("main", GetMain), + DECLARE_NAPI_GETTER("rawImage", GetRaw), DECLARE_NAPI_FUNCTION("release", Release), }; @@ -138,7 +144,7 @@ napi_value PhotoNapi::GetMain(napi_env env, napi_callback_info info) size_t argc = ARGS_ZERO; napi_value argv[ARGS_ZERO]; napi_value thisVar = nullptr; - + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); napi_get_undefined(env, &result); @@ -154,6 +160,55 @@ napi_value PhotoNapi::GetMain(napi_env env, napi_callback_info info) return result; } +napi_value PhotoNapi::CreateRawPhoto(napi_env env, napi_value rawImage) +{ + MEDIA_DEBUG_LOG("CreateRawPhoto is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + napi_value constructor; + napi_get_undefined(env, &result); + + status = napi_get_reference_value(env, sConstructor_, &constructor); + if (status == napi_ok) { + sRawImage_ = rawImage; + status = napi_new_instance(env, constructor, 0, nullptr, &result); + sRawImage_ = nullptr; + 
if (status == napi_ok && result != nullptr) { + return result; + } else { + MEDIA_ERR_LOG("Failed to create photo obj instance"); + } + } + napi_get_undefined(env, &result); + MEDIA_ERR_LOG("CreateRawPhoto call Failed"); + return result; +} + +napi_value PhotoNapi::GetRaw(napi_env env, napi_callback_info info) +{ + MEDIA_INFO_LOG("GetRaw is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + PhotoNapi* photoNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&photoNapi)); + if (status == napi_ok && photoNapi != nullptr) { + result = photoNapi->rawImage_; + MEDIA_DEBUG_LOG("PhotoNapi::GetRaw Success"); + return result; + } + napi_get_undefined(env, &result); + MEDIA_ERR_LOG("PhotoNapi::GetRaw call Failed"); + return result; +} + napi_value PhotoNapi::Release(napi_env env, napi_callback_info info) { MEDIA_INFO_LOG("Release is called"); @@ -163,7 +218,7 @@ napi_value PhotoNapi::Release(napi_env env, napi_callback_info info) size_t argc = ARGS_ZERO; napi_value argv[ARGS_ZERO]; napi_value thisVar = nullptr; - + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); napi_get_undefined(env, &result); @@ -185,6 +240,7 @@ napi_value PhotoNapi::Release(napi_env env, napi_callback_info info) if (context->objectInfo != nullptr) { context->status = true; context->objectInfo->mainImage_ = nullptr; + context->objectInfo->rawImage_ = nullptr; } }, [](napi_env env, napi_status status, void* data) { @@ -193,7 +249,6 @@ napi_value PhotoNapi::Release(napi_env env, napi_callback_info info) napi_delete_async_work(env, context->work); delete context->objectInfo; delete context; - CAMERA_FINISH_ASYNC_TRACE(context->funcName, context->taskId); }, static_cast(asyncContext.get()), &asyncContext->work); if (status != napi_ok) { MEDIA_ERR_LOG("Failed to create 
napi_create_async_work for PhotoNapi::Release"); diff --git a/frameworks/js/camera_napi/src/output/photo_output_napi.cpp b/frameworks/js/camera_napi/src/output/photo_output_napi.cpp index 6ad1a9c69..bbd97c65e 100644 --- a/frameworks/js/camera_napi/src/output/photo_output_napi.cpp +++ b/frameworks/js/camera_napi/src/output/photo_output_napi.cpp @@ -13,6 +13,7 @@ * limitations under the License. */ +#include #include #include #include "camera_buffer_handle_utils.h" @@ -20,6 +21,7 @@ #include "camera_napi_security_utils.h" #include "camera_napi_template_utils.h" #include "camera_napi_utils.h" +#include "camera_output_capability.h" #include "image_napi.h" #include "image_receiver.h" #include "pixel_map_napi.h" @@ -38,6 +40,7 @@ thread_local sptr PhotoOutputNapi::sPhotoSurface_ = nullptr; thread_local uint32_t PhotoOutputNapi::photoOutputTaskId = CAMERA_PHOTO_OUTPUT_TASKID; static uv_sem_t g_captureStartSem; static bool g_isSemInited; +static std::mutex g_photoImageMutex; PhotoListener::PhotoListener(napi_env env, const sptr photoSurface) : env_(env), photoSurface_(photoSurface) { if (bufferProcessor_ == nullptr && photoSurface != nullptr) { @@ -46,9 +49,17 @@ PhotoListener::PhotoListener(napi_env env, const sptr photoSurface) : e capturePhotoCb_ = nullptr; captureDeferredPhotoCb_ = nullptr; } +RawPhotoListener::RawPhotoListener(napi_env env, + const sptr rawPhotoSurface) : env_(env), rawPhotoSurface_(rawPhotoSurface) +{ + if (bufferProcessor_ == nullptr && rawPhotoSurface != nullptr) { + bufferProcessor_ = std::make_shared (rawPhotoSurface); + } +} void PhotoListener::OnBufferAvailable() { + std::lock_guard lock(g_photoImageMutex); CAMERA_SYNC_TRACE; MEDIA_INFO_LOG("PhotoListener::OnBufferAvailable is called"); if (!photoSurface_) { @@ -264,6 +275,140 @@ void PhotoListener::RemoveCallbackRef(napi_env env, napi_value callback, const s MEDIA_INFO_LOG("RemoveCallbackReference: js callback no find"); } +void RawPhotoListener::OnBufferAvailable() +{ + std::lock_guard 
lock(g_photoImageMutex); + CAMERA_SYNC_TRACE; + MEDIA_INFO_LOG("RawPhotoListener::OnBufferAvailable is called"); + if (!rawPhotoSurface_) { + MEDIA_ERR_LOG("RawPhotoListener napi rawPhotoSurface_ is null"); + return; + } + UpdateJSCallbackAsync(rawPhotoSurface_); +} + +void RawPhotoListener::ExecuteRawPhoto(sptr surfaceBuffer) const +{ + MEDIA_INFO_LOG("ExecuteRawPhoto"); + napi_value result[ARGS_TWO] = {nullptr, nullptr}; + napi_value callback = nullptr; + napi_value retVal; + + napi_value rawImage = nullptr; + + std::shared_ptr image = std::make_shared(surfaceBuffer, bufferProcessor_); + + napi_get_undefined(env_, &result[PARAM0]); + napi_get_undefined(env_, &result[PARAM1]); + + rawImage = Media::ImageNapi::Create(env_, image); + if (rawImage == nullptr) { + MEDIA_ERR_LOG("ImageNapi Create failed"); + napi_get_undefined(env_, &rawImage); + } + + result[PARAM1] = PhotoNapi::CreateRawPhoto(env_, rawImage); + + napi_get_reference_value(env_, captureRawPhotoCb_, &callback); + napi_call_function(env_, nullptr, callback, ARGS_TWO, result, &retVal); + rawPhotoSurface_->ReleaseBuffer(surfaceBuffer, -1); +} + +void RawPhotoListener::UpdateJSCallback(sptr rawPhotoSurface) const +{ + sptr surfaceBuffer = nullptr; + int32_t fence = -1; + int64_t timestamp; + OHOS::Rect damage; + SurfaceError surfaceRet = rawPhotoSurface->AcquireBuffer(surfaceBuffer, fence, timestamp, damage); + if (surfaceRet != SURFACE_ERROR_OK) { + MEDIA_ERR_LOG("RawPhotoListener Failed to acquire surface buffer"); + return; + } + + int32_t isDegradedImage; + surfaceBuffer->GetExtraData()->ExtraGet(OHOS::Camera::isDegradedImage, isDegradedImage); + MEDIA_INFO_LOG("RawPhotoListener UpdateJSCallback isDegradedImage:%{public}d", isDegradedImage); + + if (isDegradedImage == 0) { + ExecuteRawPhoto(surfaceBuffer); + } else { + MEDIA_ERR_LOG("RawPhoto not support deferred photo"); + } +} + +void RawPhotoListener::UpdateJSCallbackAsync(sptr rawPhotoSurface) const +{ + uv_loop_s* loop = nullptr; + 
napi_get_uv_event_loop(env_, &loop); + if (!loop) { + MEDIA_ERR_LOG("RawPhotoListener:UpdateJSCallbackAsync() failed to get event loop"); + return; + } + uv_work_t* work = new (std::nothrow) uv_work_t; + if (!work) { + MEDIA_ERR_LOG("RawPhotoListener:UpdateJSCallbackAsync() failed to allocate work"); + return; + } + std::unique_ptr callbackInfo = std::make_unique(rawPhotoSurface, this); + work->data = callbackInfo.get(); + int ret = uv_queue_work_with_qos( + loop, work, [](uv_work_t* work) {}, + [](uv_work_t* work, int status) { + RawPhotoListenerInfo* callbackInfo = reinterpret_cast(work->data); + if (callbackInfo) { + callbackInfo->listener_->UpdateJSCallback(callbackInfo->rawPhotoSurface_); + MEDIA_INFO_LOG("RawPhotoListener:UpdateJSCallbackAsync() complete"); + callbackInfo->rawPhotoSurface_ = nullptr; + callbackInfo->listener_ = nullptr; + delete callbackInfo; + } + delete work; + }, + uv_qos_user_initiated); + if (ret) { + MEDIA_ERR_LOG("RawPhotoListener:UpdateJSCallbackAsync() failed to execute work"); + delete work; + } else { + callbackInfo.release(); + } +} + +void RawPhotoListener::SaveCallbackReference(const std::string &eventType, napi_value callback) +{ + MEDIA_INFO_LOG("RawPhotoListener SaveCallbackReference is called eventType:%{public}s", eventType.c_str()); + std::lock_guard lock(mutex_); + napi_ref *curCallbackRef; + auto eventTypeEnum = PhotoOutputEventTypeHelper.ToEnum(eventType); + switch (eventTypeEnum) { + case PhotoOutputEventType::CAPTURE_PHOTO_AVAILABLE: + curCallbackRef = &captureRawPhotoCb_; + break; + default: + MEDIA_ERR_LOG("Incorrect photo callback event type received from JS"); + return; + } + + napi_ref callbackRef = nullptr; + const int32_t refCount = 1; + napi_status status = napi_create_reference(env_, callback, refCount, &callbackRef); + CHECK_AND_RETURN_LOG(status == napi_ok && callbackRef != nullptr, + "creating reference for callback fail"); + *curCallbackRef = callbackRef; +} + +void 
RawPhotoListener::RemoveCallbackRef(napi_env env, napi_value callback, const std::string &eventType) +{ + std::lock_guard lock(mutex_); + + if (eventType == CONST_CAPTURE_PHOTO_AVAILABLE) { + napi_delete_reference(env_, captureRawPhotoCb_); + captureRawPhotoCb_ = nullptr; + } + + MEDIA_INFO_LOG("RemoveCallbackReference: js callback no find"); +} + PhotoOutputCallback::PhotoOutputCallback(napi_env env) : env_(env) {} void UpdateJSExecute(uv_work_t* work) @@ -940,6 +1085,7 @@ napi_value PhotoOutputNapi::PhotoOutputNapiConstructor(napi_env env, napi_callba if (status == napi_ok && thisVar != nullptr) { std::unique_ptr obj = std::make_unique(); obj->photoOutput_ = sPhotoOutput_; + obj->profile_ = sPhotoOutput_->GetPhotoProfile(); status = napi_wrap(env, thisVar, reinterpret_cast(obj.get()), PhotoOutputNapi::PhotoOutputNapiDestructor, nullptr, nullptr); if (status == napi_ok) { @@ -999,19 +1145,21 @@ napi_value PhotoOutputNapi::CreatePhotoOutput(napi_env env, Profile& profile, st MEDIA_ERR_LOG("failed to get surface"); return result; } - - MEDIA_INFO_LOG("surface width: %{public}d, height: %{public}d", photoSurface->GetDefaultWidth(), - photoSurface->GetDefaultHeight()); photoSurface->SetUserData(CameraManager::surfaceFormat, std::to_string(profile.GetCameraFormat())); sptr surfaceProducer = photoSurface->GetProducer(); + MEDIA_INFO_LOG("profile width: %{public}d, height: %{public}d, format = %{public}d, " + "surface width: %{public}d, height: %{public}d", profile.GetSize().height, + profile.GetSize().width, static_cast(profile.GetCameraFormat()), + photoSurface->GetDefaultWidth(), photoSurface->GetDefaultHeight()); int retCode = CameraManager::GetInstance()->CreatePhotoOutput(profile, surfaceProducer, &sPhotoOutput_); - if (!CameraNapiUtils::CheckError(env, retCode)) { - return nullptr; - } - if (sPhotoOutput_ == nullptr) { + if (!CameraNapiUtils::CheckError(env, retCode) || sPhotoOutput_ == nullptr) { MEDIA_ERR_LOG("failed to create CreatePhotoOutput"); return result; 
} + if (profile.GetCameraFormat() == CAMERA_FORMAT_DNG) { + sptr rawPhotoSurface = Surface::CreateSurfaceAsConsumer("rawPhotoOutput"); + sPhotoOutput_->SetRawPhotoInfo(rawPhotoSurface); + } status = napi_new_instance(env, constructor, 0, nullptr, &result); sPhotoOutput_ = nullptr; if (status == napi_ok && result != nullptr) { @@ -1743,6 +1891,18 @@ void PhotoOutputNapi::RegisterPhotoAvailableCallbackListener( photoListener_ = photoListener; } photoListener_->SaveCallbackReference(CONST_CAPTURE_PHOTO_AVAILABLE, callback); + if (rawPhotoListener_ == nullptr && profile_.GetCameraFormat() == CAMERA_FORMAT_DNG) { + MEDIA_INFO_LOG("new rawPhotoListener and register surface consumer listener"); + sptr rawPhotoListener = + new (std::nothrow) RawPhotoListener(env, photoOutput_->rawPhotoSurface_); + SurfaceError ret = photoOutput_->rawPhotoSurface_->RegisterConsumerListener( + (sptr&)rawPhotoListener); + if (ret != SURFACE_ERROR_OK) { + MEDIA_ERR_LOG("register surface consumer listener failed!"); + } + rawPhotoListener_ = rawPhotoListener; + rawPhotoListener_->SaveCallbackReference(CONST_CAPTURE_PHOTO_AVAILABLE, callback); + } } void PhotoOutputNapi::UnregisterPhotoAvailableCallbackListener( @@ -1751,6 +1911,9 @@ void PhotoOutputNapi::UnregisterPhotoAvailableCallbackListener( if (photoListener_ != nullptr) { photoListener_->RemoveCallbackRef(env, callback, CONST_CAPTURE_PHOTO_AVAILABLE); } + if (rawPhotoListener_ != nullptr) { + rawPhotoListener_->RemoveCallbackRef(env, callback, CONST_CAPTURE_PHOTO_AVAILABLE); + } } void PhotoOutputNapi::RegisterDeferredPhotoProxyAvailableCallbackListener( @@ -1761,7 +1924,7 @@ void PhotoOutputNapi::RegisterDeferredPhotoProxyAvailableCallbackListener( return; } if (photoListener_ == nullptr) { - MEDIA_INFO_LOG("new photoListener and register surface consumer listener"); + MEDIA_INFO_LOG("new deferred photoListener and register surface consumer listener"); sptr photoListener = new (std::nothrow) PhotoListener(env, sPhotoSurface_); 
SurfaceError ret = sPhotoSurface_->RegisterConsumerListener((sptr&)photoListener); if (ret != SURFACE_ERROR_OK) { diff --git a/frameworks/js/camera_napi/src/session/camera_session_napi.cpp b/frameworks/js/camera_napi/src/session/camera_session_napi.cpp index 03fdda805..c35acb4d8 100644 --- a/frameworks/js/camera_napi/src/session/camera_session_napi.cpp +++ b/frameworks/js/camera_napi/src/session/camera_session_napi.cpp @@ -98,6 +98,11 @@ const std::vector CameraSessionNapi::focus_props = { DECLARE_NAPI_FUNCTION("getFocalLength", CameraSessionNapi::GetFocalLength) }; +const std::vector CameraSessionNapi::manual_focus_props = { + DECLARE_NAPI_FUNCTION("getFocusDistance", CameraSessionNapi::GetFocusDistance), + DECLARE_NAPI_FUNCTION("setFocusDistance", CameraSessionNapi::SetFocusDistance), +}; + const std::vector CameraSessionNapi::zoom_props = { DECLARE_NAPI_FUNCTION("getZoomRatioRange", CameraSessionNapi::GetZoomRatioRange), DECLARE_NAPI_FUNCTION("getZoomRatio", CameraSessionNapi::GetZoomRatio), @@ -570,6 +575,56 @@ void SmoothZoomCallbackListener::OnSmoothZoom(int32_t duration) OnSmoothZoomCallbackAsync(duration); } +void AbilityCallbackListener::OnAbilityChangeCallbackAsync() const +{ + MEDIA_DEBUG_LOG("OnAbilityChangeCallbackAsync is called"); + uv_loop_s* loop = nullptr; + napi_get_uv_event_loop(env_, &loop); + if (!loop) { + MEDIA_ERR_LOG("failed to get event loop"); + return; + } + uv_work_t* work = new(std::nothrow) uv_work_t; + if (!work) { + MEDIA_ERR_LOG("failed to allocate work"); + return; + } + std::unique_ptr callbackInfo = std::make_unique(this); + work->data = callbackInfo.get(); + int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) { + AbilityCallbackInfo* callbackInfo = reinterpret_cast(work->data); + if (callbackInfo) { + callbackInfo->listener_->OnAbilityChangeCallback(); + delete callbackInfo; + } + delete work; + }, uv_qos_user_initiated); + if (ret) { + MEDIA_ERR_LOG("failed to execute work"); 
+ delete work; + } else { + callbackInfo.release(); + } +} + +void AbilityCallbackListener::OnAbilityChangeCallback() const +{ + MEDIA_DEBUG_LOG("OnAbilityChangeCallback is called"); + napi_value result[ARGS_TWO]; + napi_value retVal; + napi_get_undefined(env_, &result[PARAM0]); + napi_get_undefined(env_, &result[PARAM1]); + + ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal }; + ExecuteCallback(callbackNapiPara); +} + +void AbilityCallbackListener::OnAbilityChange() +{ + MEDIA_DEBUG_LOG("OnAbilityChange is called"); + OnAbilityChangeCallbackAsync(); +} + CameraSessionNapi::CameraSessionNapi() : env_(nullptr), wrapper_(nullptr) { } @@ -595,7 +650,7 @@ napi_value CameraSessionNapi::Init(napi_env env, napi_value exports) int32_t refCount = 1; std::vector> descriptors = { camera_process_props, stabilization_props, flash_props, auto_exposure_props, focus_props, zoom_props, filter_props, beauty_props, color_effect_props, - macro_props, moon_capture_boost_props, features_props, color_management_props }; + macro_props, moon_capture_boost_props, features_props, color_management_props, manual_focus_props}; std::vector camera_session_props = CameraNapiUtils::GetPropertyDescriptor(descriptors); status = napi_define_class(env, CAMERA_SESSION_NAPI_CLASS_NAME, NAPI_AUTO_LENGTH, CameraSessionNapiConstructor, nullptr, @@ -2579,6 +2634,62 @@ napi_value CameraSessionNapi::SetColorEffect(napi_env env, napi_callback_info in return result; } +napi_value CameraSessionNapi::GetFocusDistance(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("GetFocusDistance is called"); + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ZERO; + napi_value argv[ARGS_ZERO]; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + CameraSessionNapi* cameraSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, 
reinterpret_cast(&cameraSessionNapi)); + if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) { + float distance; + int32_t retCode = cameraSessionNapi->cameraSession_->GetFocusDistance(distance); + if (!CameraNapiUtils::CheckError(env, retCode)) { + return nullptr; + } + napi_create_double(env, distance, &result); + } else { + MEDIA_ERR_LOG("GetFocusDistance call Failed!"); + } + return result; +} + +napi_value CameraSessionNapi::SetFocusDistance(napi_env env, napi_callback_info info) +{ + MEDIA_DEBUG_LOG("SetFocusDistance is called"); + CAMERA_SYNC_TRACE; + napi_status status; + napi_value result = nullptr; + size_t argc = ARGS_ONE; + napi_value argv[ARGS_ONE] = {0}; + napi_value thisVar = nullptr; + + CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); + + napi_get_undefined(env, &result); + CameraSessionNapi* cameraSessionNapi = nullptr; + status = napi_unwrap(env, thisVar, reinterpret_cast(&cameraSessionNapi)); + if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) { + double value; + napi_get_value_double(env, argv[PARAM0], &value); + float distance = static_cast(value); + cameraSessionNapi->cameraSession_->LockForControl(); + cameraSessionNapi->cameraSession_->SetFocusDistance(distance); + MEDIA_INFO_LOG("CameraSessionNapi::SetFocusDistance set focusDistance:%{public}f!", distance); + cameraSessionNapi->cameraSession_->UnlockForControl(); + } else { + MEDIA_ERR_LOG("SetFocusDistance call Failed!"); + } + return result; +} + napi_value CameraSessionNapi::IsMacroSupported(napi_env env, napi_callback_info info) { if (!CameraNapiSecurity::CheckSystemApp(env)) { @@ -2914,6 +3025,27 @@ void CameraSessionNapi::UnregisterSessionErrorCallbackListener( sessionCallback_->RemoveCallbackRef(env, callback); } +void CameraSessionNapi::RegisterAbilityChangeCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + if 
(abilityCallback_ == nullptr) { + auto abilityCallback = std::make_shared(env); + abilityCallback_ = abilityCallback; + cameraSession_->SetAbilityCallback(abilityCallback); + } + abilityCallback_->SaveCallbackReference(callback, isOnce); +} + +void CameraSessionNapi::UnregisterAbilityChangeCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + if (abilityCallback_ == nullptr) { + MEDIA_ERR_LOG("abilityCallback is null"); + } else { + abilityCallback_->RemoveCallbackRef(env, callback); + } +} + void CameraSessionNapi::RegisterSmoothZoomCallbackListener( napi_env env, napi_value callback, const std::vector& args, bool isOnce) { @@ -2934,6 +3066,62 @@ void CameraSessionNapi::UnregisterSmoothZoomCallbackListener( smoothZoomCallback_->RemoveCallbackRef(env, callback); } +void CameraSessionNapi::RegisterExposureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED, + "this type callback can not be registered in current session!"); +} + +void CameraSessionNapi::UnregisterExposureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED, + "this type callback can not be unregistered in current session!"); +} + +void CameraSessionNapi::RegisterIsoInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED, + "this type callback can not be registered in current session!"); +} + +void CameraSessionNapi::UnregisterIsoInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED, + "this type callback can not be unregistered in current session!"); +} + +void CameraSessionNapi::RegisterApertureInfoCallbackListener( + 
napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED, + "this type callback can not be registered in current session!"); +} + +void CameraSessionNapi::UnregisterApertureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED, + "this type callback can not be unregistered in current session!"); +} + +void CameraSessionNapi::RegisterLuminationInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) +{ + CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED, + "this type callback can not be registered in current session!"); +} + +void CameraSessionNapi::UnregisterLuminationInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) +{ + CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED, + "this type callback can not be unregistered in current session!"); +} + const CameraSessionNapi::EmitterFunctions& CameraSessionNapi::GetEmitterFunctions() { static const EmitterFunctions funMap = { @@ -2957,7 +3145,22 @@ const CameraSessionNapi::EmitterFunctions& CameraSessionNapi::GetEmitterFunction &CameraSessionNapi::UnregisterSessionErrorCallbackListener } }, { "smoothZoomInfoAvailable", { &CameraSessionNapi::RegisterSmoothZoomCallbackListener, - &CameraSessionNapi::UnregisterSmoothZoomCallbackListener } } }; + &CameraSessionNapi::UnregisterSmoothZoomCallbackListener } }, + { "exposureInfo", { + &CameraSessionNapi::RegisterExposureInfoCallbackListener, + &CameraSessionNapi::UnregisterExposureInfoCallbackListener} }, + { "isoInfo", { + &CameraSessionNapi::RegisterIsoInfoCallbackListener, + &CameraSessionNapi::UnregisterIsoInfoCallbackListener } }, + { "apertureInfo", { + &CameraSessionNapi::RegisterApertureInfoCallbackListener, + 
&CameraSessionNapi::UnregisterApertureInfoCallbackListener } }, + { "luminationInfo", { + &CameraSessionNapi::RegisterLuminationInfoCallbackListener, + &CameraSessionNapi::UnregisterLuminationInfoCallbackListener } }, + { "abilityChange", { + &CameraSessionNapi::RegisterAbilityChangeCallbackListener, + &CameraSessionNapi::UnregisterAbilityChangeCallbackListener } } }; return funMap; } diff --git a/frameworks/native/camera/BUILD.gn b/frameworks/native/camera/BUILD.gn index 4d54e0d04..e3b90007e 100644 --- a/frameworks/native/camera/BUILD.gn +++ b/frameworks/native/camera/BUILD.gn @@ -92,6 +92,7 @@ ohos_shared_library("camera_framework") { "src/session/night_session.cpp", "src/session/photo_session.cpp", "src/session/portrait_session.cpp", + "src/session/profession_session.cpp", "src/session/scan_session.cpp", "src/session/video_session.cpp", "src/utils/camera_buffer_handle_utils.cpp", @@ -112,6 +113,7 @@ ohos_shared_library("camera_framework") { cfi = true cfi_cross_dso = true debug = false + blocklist = "../../../cfi_blocklist.txt" } public_configs = [ diff --git a/frameworks/native/camera/src/input/camera_device.cpp b/frameworks/native/camera/src/input/camera_device.cpp index d323c692b..e93de8a5e 100644 --- a/frameworks/native/camera/src/input/camera_device.cpp +++ b/frameworks/native/camera/src/input/camera_device.cpp @@ -121,7 +121,7 @@ void CameraDevice::init(common_metadata_header_t* metadata) cameraOrientation_ = item.data.i32[0]; } MEDIA_INFO_LOG("camera position: %{public}d, camera type: %{public}d, camera connection type: %{public}d, " - "Mirror Supported: %{public}d , camera foldScreen type: %{public}d, camera orientation: %{public}d", + "Mirror Supported: %{public}d, camera foldScreen type: %{public}d, camera orientation: %{public}d", cameraPosition_, cameraType_, connectionType_, isMirrorSupported_, foldScreenType_, cameraOrientation_); } @@ -245,7 +245,7 @@ std::vector CameraDevice::GetExposureBiasRange() uint32_t biasRangeCount = 2; 
camera_metadata_item_t item; auto metadata = GetMetadata(); - ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AE_COMPENSATION_RANGE, &item); + ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_AE_COMPENSATION_RANGE, &item); if (ret != CAM_META_SUCCESS) { MEDIA_ERR_LOG("Failed to get exposure compensation range with return code %{public}d", ret); return {}; diff --git a/frameworks/native/camera/src/input/camera_info.cpp b/frameworks/native/camera/src/input/camera_info.cpp index e4cadf6d8..a513f831f 100644 --- a/frameworks/native/camera/src/input/camera_info.cpp +++ b/frameworks/native/camera/src/input/camera_info.cpp @@ -224,7 +224,7 @@ std::vector CameraInfo::GetExposureBiasRange() uint32_t biasRangeCount = 2; camera_metadata_item_t item; - ret = Camera::FindCameraMetadataItem(metadata_->get(), OHOS_CONTROL_AE_COMPENSATION_RANGE, &item); + ret = Camera::FindCameraMetadataItem(metadata_->get(), OHOS_ABILITY_AE_COMPENSATION_RANGE, &item); if (ret != CAM_META_SUCCESS) { MEDIA_ERR_LOG("Failed to get exposure compensation range with return code %{public}d", ret); return {}; @@ -242,7 +242,7 @@ std::vector CameraInfo::GetExposureBiasRange() } MEDIA_DEBUG_LOG("Exposure hdi compensation min: %{public}d, max: %{public}d", range[minIndex], range[maxIndex]); - ret = Camera::FindCameraMetadataItem(metadata_->get(), OHOS_CONTROL_AE_COMPENSATION_STEP, &item); + ret = Camera::FindCameraMetadataItem(metadata_->get(), OHOS_ABILITY_AE_COMPENSATION_STEP, &item); if (ret != CAM_META_SUCCESS) { MEDIA_ERR_LOG("Failed to get exposure compensation step with return code %{public}d", ret); return {}; diff --git a/frameworks/native/camera/src/input/camera_manager.cpp b/frameworks/native/camera/src/input/camera_manager.cpp index 3a2c7fc7e..cb910570e 100644 --- a/frameworks/native/camera/src/input/camera_manager.cpp +++ b/frameworks/native/camera/src/input/camera_manager.cpp @@ -46,7 +46,8 @@ const std::unordered_map CameraManager::metaToFwC 
{OHOS_CAMERA_FORMAT_YCBCR_P010, CAMERA_FORMAT_YCBCR_P010}, {OHOS_CAMERA_FORMAT_YCRCB_P010, CAMERA_FORMAT_YCRCB_P010}, {OHOS_CAMERA_FORMAT_YCBCR_420_SP, CAMERA_FORMAT_NV12}, - {OHOS_CAMERA_FORMAT_422_YUYV, CAMERA_FORMAT_YUV_422_YUYV} + {OHOS_CAMERA_FORMAT_422_YUYV, CAMERA_FORMAT_YUV_422_YUYV}, + {OHOS_CAMERA_FORMAT_DNG, CAMERA_FORMAT_DNG}, }; const std::unordered_map CameraManager::fwToMetaCameraFormat_ = { @@ -56,7 +57,9 @@ const std::unordered_map CameraManager::fwToMetaC {CAMERA_FORMAT_YCBCR_P010, OHOS_CAMERA_FORMAT_YCBCR_P010}, {CAMERA_FORMAT_YCRCB_P010, OHOS_CAMERA_FORMAT_YCRCB_P010}, {CAMERA_FORMAT_NV12, OHOS_CAMERA_FORMAT_YCBCR_420_SP}, - {CAMERA_FORMAT_YUV_422_YUYV, OHOS_CAMERA_FORMAT_422_YUYV} + {CAMERA_FORMAT_YUV_422_YUYV, OHOS_CAMERA_FORMAT_422_YUYV}, + {CAMERA_FORMAT_DNG, OHOS_CAMERA_FORMAT_DNG}, + }; const std::unordered_map g_metaToFwSupportedMode_ = { @@ -65,7 +68,8 @@ const std::unordered_map g_metaToFwSupportedMode_ = { {OperationMode::VIDEO, VIDEO}, {OperationMode::PORTRAIT, PORTRAIT}, {OperationMode::NIGHT, NIGHT}, - {OperationMode::PROFESSIONAL, PROFESSIONAL}, + {OperationMode::PROFESSIONAL_PHOTO, PROFESSIONAL_PHOTO}, + {OperationMode::PROFESSIONAL_VIDEO, PROFESSIONAL_VIDEO}, {OperationMode::SLOW_MOTION, SLOW_MOTION}, {OperationMode::SCAN_CODE, SCAN}, {OperationMode::HIGH_FRAME_RATE, HIGH_FRAME_RATE} @@ -77,7 +81,8 @@ const std::unordered_map g_fwToMetaSupportedMode_ = { {VIDEO, OperationMode::VIDEO}, {PORTRAIT, OperationMode::PORTRAIT}, {NIGHT, OperationMode::NIGHT}, - {PROFESSIONAL, OperationMode::PROFESSIONAL}, + {PROFESSIONAL_PHOTO, OperationMode::PROFESSIONAL_PHOTO}, + {PROFESSIONAL_VIDEO, OperationMode::PROFESSIONAL_VIDEO}, {SLOW_MOTION, OperationMode::SLOW_MOTION}, {SCAN, OperationMode::SCAN_CODE}, {HIGH_FRAME_RATE, OperationMode::HIGH_FRAME_RATE} @@ -263,6 +268,12 @@ sptr CameraManager::CreateCaptureSession(SceneMode mode) case SceneMode::PORTRAIT: captureSession = new(std::nothrow) PortraitSession(session); break; + case 
SceneMode::PROFESSIONAL_VIDEO: + captureSession = new(std::nothrow) ProfessionSession(session, cameraObjList); + break; + case SceneMode::PROFESSIONAL_PHOTO: + captureSession = new(std::nothrow) ProfessionSession(session, cameraObjList); + break; case SceneMode::SCAN: captureSession = new(std::nothrow) ScanSession(session); break; @@ -638,6 +649,7 @@ int CameraManager::CreateVideoOutput(VideoProfile &profile, sptr &surfa // todo: need to set FPS range passed in video profile. metaFormat = GetCameraMetadataFormat(profile.GetCameraFormat()); + MEDIA_DEBUG_LOG("metaFormat = %{public}d", static_cast(metaFormat)); retCode = serviceProxy_->CreateVideoOutput(surface->GetProducer(), metaFormat, profile.GetSize().width, profile.GetSize().height, streamRepeat); if (retCode == CAMERA_OK) { @@ -818,7 +830,7 @@ sptr CameraManager::GetCameraDeviceFromId(std::string cameraId) return cameraObj; } -sptr &CameraManager::GetInstance() +sptr &CameraManager::GetInstance() __attribute__((no_sanitize("cfi"))) { if (CameraManager::cameraManager_ == nullptr) { std::unique_lock lock(instanceMutex_); @@ -1203,8 +1215,10 @@ sptr CameraManager::GetSupportedOutputCapability(sptr supportedModes = GetSupportedModes(camera); + int32_t mode = (supportedModes.empty() && isTemplateMode_.count(modeName)) ? 
SceneMode::NORMAL : modeName; + MEDIA_INFO_LOG("GetSupportedOutputCapability by device = %{public}s, mode = %{public}d", + camera->GetID().c_str(), mode); ParseExtendCapability(cameraOutputCapability, mode, item); } else if (g_isCapabilitySupported(metadata, item, OHOS_ABILITY_STREAM_AVAILABLE_BASIC_CONFIGURATIONS)) { ParseBasicCapability(cameraOutputCapability, metadata, item); @@ -1248,11 +1262,13 @@ void CameraManager::CreateProfile4StreamType(OutputCapStreamType streamType, uin (detailInfo.fixedFps == frameRate120 || detailInfo.fixedFps == frameRate240)) { continue; } - CameraFormat format; + CameraFormat format = CAMERA_FORMAT_INVALID; auto itr = metaToFwCameraFormat_.find(static_cast(detailInfo.format)); if (itr != metaToFwCameraFormat_.end()) { format = itr->second; } else { + MEDIA_ERR_LOG("CreateProfile4StreamType failed format = %{public}d", + extendInfo.modeInfo[modeIndex].streamInfo[streamIndex].detailInfo[k].format); format = CAMERA_FORMAT_INVALID; continue; } @@ -1309,12 +1325,13 @@ void CameraManager::SetCameraServiceCallback(sptr& callb camera_format_t CameraManager::GetCameraMetadataFormat(CameraFormat format) { camera_format_t metaFormat = OHOS_CAMERA_FORMAT_YCRCB_420_SP; + MEDIA_DEBUG_LOG("format = %{public}d", static_cast(format)); auto itr = fwToMetaCameraFormat_.find(format); if (itr != fwToMetaCameraFormat_.end()) { metaFormat = itr->second; } - + MEDIA_DEBUG_LOG("metaFormat = %{public}d", static_cast(metaFormat)); return metaFormat; } diff --git a/frameworks/native/camera/src/output/photo_output.cpp b/frameworks/native/camera/src/output/photo_output.cpp index 89ebf9a3f..4b34d40ae 100644 --- a/frameworks/native/camera/src/output/photo_output.cpp +++ b/frameworks/native/camera/src/output/photo_output.cpp @@ -330,6 +330,18 @@ int32_t PhotoOutput::SetThumbnail(bool isEnabled) return streamCapturePtr->SetThumbnail(isEnabled, thumbnailSurface_->GetProducer()); } +int32_t PhotoOutput::SetRawPhotoInfo(sptr &surface) +{ + CAMERA_SYNC_TRACE; + auto 
streamCapturePtr = static_cast(GetStream().GetRefPtr()); + if (streamCapturePtr == nullptr) { + MEDIA_ERR_LOG("PhotoOutput::SetThumbnail Failed to create surface"); + return SERVICE_FATL_ERROR; + } + rawPhotoSurface_ = surface; + return streamCapturePtr->SetRawPhotoStreamInfo(rawPhotoSurface_->GetProducer()); +} + std::shared_ptr PhotoOutput::GetApplicationCallback() { std::lock_guard lock(outputCallbackMutex_); diff --git a/frameworks/native/camera/src/session/capture_session.cpp b/frameworks/native/camera/src/session/capture_session.cpp index e2b8479ba..5efd4deff 100644 --- a/frameworks/native/camera/src/session/capture_session.cpp +++ b/frameworks/native/camera/src/session/capture_session.cpp @@ -448,7 +448,7 @@ void CaptureSession::UpdateDeviceDeferredability() MEDIA_DEBUG_LOG("UpdateDeviceDeferredability begin."); inputDevice_->GetCameraDeviceInfo()->modeDeferredType_ = {}; camera_metadata_item_t item; - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); int32_t ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_DEFERRED_IMAGE_DELIVERY, &item); MEDIA_INFO_LOG("UpdateDeviceDeferredability get ret: %{public}d", ret); MEDIA_DEBUG_LOG("UpdateDeviceDeferredability item: %{public}d count: %{public}d", item.item, item.count); @@ -571,21 +571,35 @@ bool CaptureSession::CanAddOutput(sptr& output, SceneMode modeNam MEDIA_ERR_LOG("CaptureSession::CanAddOutput Failed inputDevice_ is nullptr"); return false; } + auto validateOutputFunc = [modeName](auto& vaildateProfile, auto& profiles, std::string&& outputType) -> bool { + bool result = std::any_of(profiles.begin(), profiles.end(), + [&vaildateProfile](const auto& profile) { return vaildateProfile == profile; }); + Size invalidSize = vaildateProfile.GetSize(); + if (result == false) { + MEDIA_ERR_LOG("CaptureSession::CanAddOutput profile invalid in " + "%{public}s_output, mode(%{public}d): w(%{public}d),h(%{public}d),f(%{public}d)", + 
outputType.c_str(), static_cast(modeName), + invalidSize.width, invalidSize.height, vaildateProfile.GetCameraFormat()); + } else { + MEDIA_DEBUG_LOG("CaptureSession::CanAddOutput profile pass in " + "%{public}s_output, mode(%{public}d): w(%{public}d),h(%{public}d),f(%{public}d)", + outputType.c_str(), static_cast(modeName), + invalidSize.width, invalidSize.height, vaildateProfile.GetCameraFormat()); + } + return result; + }; if (output->GetOutputType() == CAPTURE_OUTPUT_TYPE_PREVIEW) { - std::vector previewProfiles = inputDevice_->GetCameraDeviceInfo()->modePreviewProfiles_[modeName]; + std::vector profiles = inputDevice_->GetCameraDeviceInfo()->modePreviewProfiles_[modeName]; Profile vaildateProfile = output->GetPreviewProfile(); - return std::any_of(previewProfiles.begin(), previewProfiles.end(), - [&vaildateProfile](const auto& previewProfile) { return vaildateProfile == previewProfile; }); + return validateOutputFunc(vaildateProfile, profiles, std::move("preview")); } else if (output->GetOutputType() == CAPTURE_OUTPUT_TYPE_PHOTO) { - std::vector photoProfiles = inputDevice_->GetCameraDeviceInfo()->modePhotoProfiles_[modeName]; + std::vector profiles = inputDevice_->GetCameraDeviceInfo()->modePhotoProfiles_[modeName]; Profile vaildateProfile = output->GetPhotoProfile(); - return std::any_of(photoProfiles.begin(), photoProfiles.end(), - [&vaildateProfile](const auto& photoProfile) { return vaildateProfile == photoProfile; }); + return validateOutputFunc(vaildateProfile, profiles, std::move("photo")); } else if (output->GetOutputType() == CAPTURE_OUTPUT_TYPE_VIDEO) { - std::vector videoProfiles = inputDevice_->GetCameraDeviceInfo()->modeVideoProfiles_[modeName]; + std::vector profiles = inputDevice_->GetCameraDeviceInfo()->modeVideoProfiles_[modeName]; VideoProfile vaildateProfile = output->GetVideoProfile(); - return std::any_of(videoProfiles.begin(), videoProfiles.end(), - [&vaildateProfile](const auto& profile) { return vaildateProfile == profile; }); + return 
validateOutputFunc(vaildateProfile, profiles, std::move("video")); } else if (output->GetOutputType() == CAPTURE_OUTPUT_TYPE_METADATA) { MEDIA_INFO_LOG("CaptureSession::CanAddOutput MetadataOutput"); return true; @@ -741,6 +755,7 @@ int32_t CaptureSession::Release() macroStatusCallback_ = nullptr; moonCaptureBoostStatusCallback_ = nullptr; smoothZoomCallback_ = nullptr; + abilityCallback_ = nullptr; return ServiceToCameraError(errCode); } @@ -831,7 +846,7 @@ int32_t CaptureSession::UpdateSetting(std::shared_ptr ch return ServiceToCameraError(ret); } - std::shared_ptr baseMetadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr baseMetadata = GetMetadata(); for (uint32_t index = 0; index < count; index++) { camera_metadata_item_t srcItem; int ret = OHOS::Camera::GetCameraMetadataItem(metadataHeader, index, &srcItem); @@ -1132,7 +1147,7 @@ std::vector CaptureSession::GetSupportedExposureModes() return {}; } std::vector supportedExposureModes; - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_EXPOSURE_MODES, &item); if (ret != CAM_META_SUCCESS) { @@ -1160,7 +1175,7 @@ int32_t CaptureSession::GetSupportedExposureModes(std::vector& sup MEDIA_ERR_LOG("CaptureSession::GetSupportedExposureModes camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_EXPOSURE_MODES, &item); if (ret != CAM_META_SUCCESS) { @@ -1225,7 +1240,7 @@ ExposureMode CaptureSession::GetExposureMode() MEDIA_ERR_LOG("CaptureSession::GetExposureMode camera device is null"); return EXPOSURE_MODE_UNSUPPORTED; } - std::shared_ptr metadata = 
inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_EXPOSURE_MODE, &item); if (ret != CAM_META_SUCCESS) { @@ -1251,7 +1266,7 @@ int32_t CaptureSession::GetExposureMode(ExposureMode& exposureMode) MEDIA_ERR_LOG("CaptureSession::GetExposureMode camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_EXPOSURE_MODE, &item); if (ret != CAM_META_SUCCESS) { @@ -1310,7 +1325,7 @@ Point CaptureSession::GetMeteringPoint() MEDIA_ERR_LOG("CaptureSession::GetMeteringPoint camera device is null"); return exposurePoint; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AE_REGIONS, &item); if (ret != CAM_META_SUCCESS) { @@ -1335,7 +1350,7 @@ int32_t CaptureSession::GetMeteringPoint(Point& exposurePoint) MEDIA_ERR_LOG("CaptureSession::GetMeteringPoint camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AE_REGIONS, &item); if (ret != CAM_META_SUCCESS) { @@ -1410,10 +1425,6 @@ int32_t CaptureSession::SetExposureBias(float exposureValue) "%{public}f is greater than maximum bias: %{public}f", exposureValue, biasRange[maxIndex]); exposureValue = biasRange[maxIndex]; } - if (std::abs(exposureValue) <= 1e-6) { - MEDIA_ERR_LOG("CaptureSession::SetExposureValue exposure compensation value no need to change"); - return 
CameraErrorCode::SUCCESS; - } int32_t exposureCompensation = CalculateExposureValue(exposureValue); @@ -1439,7 +1450,7 @@ float CaptureSession::GetExposureValue() MEDIA_ERR_LOG("CaptureSession::GetExposureValue camera device is null"); return 0; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AE_EXPOSURE_COMPENSATION, &item); if (ret != CAM_META_SUCCESS) { @@ -1448,7 +1459,7 @@ float CaptureSession::GetExposureValue() } int32_t exposureCompensation = item.data.i32[0]; - ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AE_COMPENSATION_STEP, &item); + ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_AE_COMPENSATION_STEP, &item); if (ret != CAM_META_SUCCESS) { MEDIA_ERR_LOG("CaptureSession::GetExposureValue Failed with return code %{public}d", ret); return 0; @@ -1477,7 +1488,7 @@ int32_t CaptureSession::GetExposureValue(float& exposureValue) MEDIA_ERR_LOG("CaptureSession::GetExposureValue camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AE_EXPOSURE_COMPENSATION, &item); if (ret != CAM_META_SUCCESS) { @@ -1486,7 +1497,7 @@ int32_t CaptureSession::GetExposureValue(float& exposureValue) } int32_t exposureCompensation = item.data.i32[0]; - ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AE_COMPENSATION_STEP, &item); + ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_AE_COMPENSATION_STEP, &item); if (ret != CAM_META_SUCCESS) { MEDIA_ERR_LOG("CaptureSession::GetExposureValue Failed with return code %{public}d", ret); return 0; @@ -1540,7 +1551,7 @@ std::vector 
CaptureSession::GetSupportedFocusModes() MEDIA_ERR_LOG("CaptureSession::GetSupportedFocusModes camera device is null"); return supportedFocusModes; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_FOCUS_MODES, &item); if (ret != CAM_META_SUCCESS) { @@ -1567,7 +1578,7 @@ int32_t CaptureSession::GetSupportedFocusModes(std::vector& supported MEDIA_ERR_LOG("CaptureSession::GetSupportedFocusModes camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_FOCUS_MODES, &item); if (ret != CAM_META_SUCCESS) { @@ -1667,7 +1678,7 @@ FocusMode CaptureSession::GetFocusMode() MEDIA_ERR_LOG("CaptureSession::GetFocusMode camera device is null"); return FOCUS_MODE_MANUAL; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_FOCUS_MODE, &item); if (ret != CAM_META_SUCCESS) { @@ -1692,7 +1703,7 @@ int32_t CaptureSession::GetFocusMode(FocusMode& focusMode) MEDIA_ERR_LOG("CaptureSession::GetFocusMode camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_FOCUS_MODE, &item); if (ret != CAM_META_SUCCESS) { @@ -1791,7 +1802,7 @@ Point CaptureSession::GetFocusPoint() MEDIA_ERR_LOG("CaptureSession::GetFocusPoint camera device is null"); return focusPoint; } - std::shared_ptr metadata = 
inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AF_REGIONS, &item); if (ret != CAM_META_SUCCESS) { @@ -1816,7 +1827,7 @@ int32_t CaptureSession::GetFocusPoint(Point& focusPoint) MEDIA_ERR_LOG("CaptureSession::GetFocusPoint camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AF_REGIONS, &item); if (ret != CAM_META_SUCCESS) { @@ -1839,7 +1850,7 @@ float CaptureSession::GetFocalLength() MEDIA_ERR_LOG("CaptureSession::GetFocalLength camera device is null"); return 0; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_FOCAL_LENGTH, &item); if (ret != CAM_META_SUCCESS) { @@ -1860,7 +1871,7 @@ int32_t CaptureSession::GetFocalLength(float& focalLength) MEDIA_ERR_LOG("CaptureSession::GetFocalLength camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_FOCAL_LENGTH, &item); if (ret != CAM_META_SUCCESS) { @@ -1972,7 +1983,7 @@ std::vector CaptureSession::GetSupportedFlashModes() MEDIA_ERR_LOG("CaptureSession::GetSupportedFlashModes camera device is null"); return supportedFlashModes; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), 
OHOS_ABILITY_FLASH_MODES, &item); if (ret != CAM_META_SUCCESS) { @@ -1999,7 +2010,7 @@ int32_t CaptureSession::GetSupportedFlashModes(std::vector& supported MEDIA_ERR_LOG("CaptureSession::GetSupportedFlashModes camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_FLASH_MODES, &item); if (ret != CAM_META_SUCCESS) { @@ -2025,7 +2036,7 @@ FlashMode CaptureSession::GetFlashMode() MEDIA_ERR_LOG("CaptureSession::GetFlashMode camera device is null"); return FLASH_MODE_CLOSE; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_FLASH_MODE, &item); if (ret != CAM_META_SUCCESS) { @@ -2051,7 +2062,7 @@ int32_t CaptureSession::GetFlashMode(FlashMode& flashMode) MEDIA_ERR_LOG("CaptureSession::GetFlashMode camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_FLASH_MODE, &item); if (ret != CAM_META_SUCCESS) { @@ -2168,7 +2179,7 @@ std::vector CaptureSession::GetZoomRatioRange() MEDIA_ERR_LOG("CaptureSession::GetZoomRatioRange camera device is null"); return {}; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_SCENE_ZOOM_CAP, &item); if (ret != CAM_META_SUCCESS || item.count == 0) { @@ -2206,7 +2217,7 @@ int32_t CaptureSession::GetZoomRatioRange(std::vector& zoomRatioRange) 
MEDIA_ERR_LOG("CaptureSession::GetZoomRatioRange camera device is null"); return CameraErrorCode::SUCCESS; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_SCENE_ZOOM_CAP, &item); if (ret != CAM_META_SUCCESS || item.count == 0) { @@ -2504,7 +2515,7 @@ void CaptureSession::SetCaptureMetadataObjectTypes(std::setLockForControl(); if (!this->changedMetadata_->addEntry(OHOS_STATISTICS_FACE_DETECT_SWITCH, &objectType, count)) { - MEDIA_ERR_LOG("lvxq SetCaptureMetadataObjectTypes: Failed to add detect object types to changed metadata"); + MEDIA_ERR_LOG("SetCaptureMetadataObjectTypes: Failed to add detect object types to changed metadata"); } this->UnlockForControl(); } @@ -2595,7 +2606,7 @@ int32_t CaptureSession::VerifyAbility(uint32_t ability) } ProcessProfilesAbilityId(portraitMode); - + std::vector photoAbilityId = previewProfile_.GetAbilityId(); std::vector previewAbilityId = previewProfile_.GetAbilityId(); @@ -2667,7 +2678,7 @@ std::vector CaptureSession::GetSupportedFilters() return supportedFilters; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_SCENE_FILTER_TYPES, &item); if (ret != CAM_META_SUCCESS || item.count == 0) { @@ -2693,7 +2704,7 @@ FilterType CaptureSession::GetFilter() MEDIA_ERR_LOG("CaptureSession::GetFilter camera device is null"); return FilterType::NONE; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_FILTER_TYPE, &item); if (ret != CAM_META_SUCCESS || item.count == 0) { @@ -2770,7 +2781,7 @@ std::vector 
CaptureSession::GetSupportedBeautyTypes() return supportedBeautyTypes; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_SCENE_BEAUTY_TYPES, &item); if (ret != CAM_META_SUCCESS || item.count == 0) { @@ -2805,7 +2816,7 @@ std::vector CaptureSession::GetSupportedBeautyRange(BeautyType beautyTy return supportedBeautyRange; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; MEDIA_ERR_LOG("CaptureSession::GetSupportedBeautyRange: %{public}d", beautyType); @@ -2976,6 +2987,93 @@ int32_t CaptureSession::GetBeauty(BeautyType beautyType) return beautyLevel; } +// focus distance +float CaptureSession::GetMinimumFocusDistance() +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("CaptureSession::GetMinimumFocusDistance Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("CaptureSession::GetMinimumFocusDistance camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int32_t ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("CaptureSession::GetMinimumFocusDistance Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + float minimumFocusDistance = item.data.f[0]; + MEDIA_DEBUG_LOG("CaptureSession::GetMinimumFocusDistance minimumFocusDistance=%{public}f", minimumFocusDistance); + return minimumFocusDistance; +} + +int32_t CaptureSession::GetFocusDistance(float& focusDistance) +{ + focusDistance = 0; + if (!IsSessionCommited()) { + 
MEDIA_ERR_LOG("CaptureSession::GetFocusDistance Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("CaptureSession::GetFocusDistance camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int32_t ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_LENS_FOCUS_DISTANCE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("CaptureSession::GetFocusDistance Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + MEDIA_DEBUG_LOG("CaptureSession::GetFocusDistance meta=%{public}f", item.data.f[0]); + if (FloatIsEqual(GetMinimumFocusDistance(), 0.0)) { + MEDIA_ERR_LOG("CaptureSession::GetFocusDistance minimum distance is 0"); + return CameraErrorCode::SUCCESS; + } + focusDistance = 1- (item.data.f[0] / GetMinimumFocusDistance()); + MEDIA_DEBUG_LOG("CaptureSession::GetFocusDistance focusDistance = %{public}f", focusDistance); + return CameraErrorCode::SUCCESS; +} + +int32_t CaptureSession::SetFocusDistance(float focusDistance) +{ + CAMERA_SYNC_TRACE; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("CaptureSession::SetFocusDistance Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("CaptureSession::SetFocusDistance Need to call LockForControl " + "before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + bool status = false; + uint32_t count = 1; + int32_t ret; + MEDIA_DEBUG_LOG("CaptureSession::GetFocusDistance app set focusDistance = %{public}f", focusDistance); + camera_metadata_item_t item; + if (focusDistance < 0) { + focusDistance = 0; + } else if (focusDistance > 1) { + focusDistance = 1; + } + float value = (1 - focusDistance) * GetMinimumFocusDistance(); + 
MEDIA_DEBUG_LOG("CaptureSession::GetFocusDistance meta set focusDistance = %{public}f", value); + ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_LENS_FOCUS_DISTANCE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_LENS_FOCUS_DISTANCE, &value, count); + } else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_LENS_FOCUS_DISTANCE, &value, count); + } + if (!status) { + MEDIA_ERR_LOG("CaptureSession::SetFocusDistance Failed to set"); + } + return CameraErrorCode::SUCCESS; +} + void CaptureSession::SetFrameRateRange(const std::vector& frameRateRange) { std::vector videoFrameRateRange = frameRateRange; @@ -3016,8 +3114,8 @@ bool CaptureSession::IsSessionCommited() int32_t CaptureSession::CalculateExposureValue(float exposureValue) { camera_metadata_item_t item; - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); - int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AE_COMPENSATION_STEP, &item); + std::shared_ptr metadata = GetMetadata(); + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_AE_COMPENSATION_STEP, &item); if (ret != CAM_META_SUCCESS) { MEDIA_ERR_LOG("CaptureSession::Get Ae Compensation step Failed with return code %{public}d", ret); return CameraErrorCode::OPERATION_NOT_ALLOWED; @@ -3045,7 +3143,7 @@ ColorSpaceInfo CaptureSession::GetSupportedColorSpaceInfo() MEDIA_ERR_LOG("CaptureSession::GetSupportedColorSpaceInfo camera device is null"); return colorSpaceInfo; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_AVAILABLE_COLOR_SPACES, &item); if (ret != CAM_META_SUCCESS) { @@ -3130,7 +3228,7 @@ int32_t CaptureSession::SetColorSpace(ColorSpace colorSpace) CM_ColorSpaceType captureColorSpace = 
metaColorSpace; ColorSpaceInfo colorSpaceInfo = GetSupportedColorSpaceInfo(); - + ColorSpace fwkCaptureColorSpace; auto it = g_metaColorSpaceMap_.find(captureColorSpace); if (it != g_metaColorSpaceMap_.end()) { @@ -3165,7 +3263,7 @@ int32_t CaptureSession::ProcessCaptureColorSpace(ColorSpaceInfo colorSpaceInfo, if (GetMode() != colorSpaceInfo.modeInfo[i].modeType) { continue; } - + auto it = g_fwkColorSpaceMap_.find(fwkCaptureColorSpace); if (it != g_fwkColorSpaceMap_.end()) { metaColorSpace = it->second; @@ -3226,7 +3324,7 @@ std::vector CaptureSession::GetSupportedColorEffects() MEDIA_ERR_LOG("CaptureSession::GetSupportedColorEffects camera device is null"); return supportedColorEffects; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_SUPPORTED_COLOR_MODES, &item); if (ret != CAM_META_SUCCESS) { @@ -3253,7 +3351,7 @@ ColorEffect CaptureSession::GetColorEffect() MEDIA_ERR_LOG("CaptureSession::GetColorEffect camera device is null"); return colorEffect; } - std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + std::shared_ptr metadata = GetMetadata(); camera_metadata_item_t item; int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_SUPPORTED_COLOR_MODES, &item); if (ret != CAM_META_SUCCESS || item.count == 0) { @@ -3649,5 +3747,26 @@ void CaptureSession::SetUserId() MEDIA_DEBUG_LOG("CaptureSession::SetUserId Failed"); } } -} // namespace CameraStandard -} // namespace OHOS + +void CaptureSession::ExecuteAbilityChangeCallback() +{ + std::lock_guard lock(sessionCallbackMutex_); + if (abilityCallback_ != nullptr) { + abilityCallback_->OnAbilityChange(); + } +} + +void CaptureSession::SetAbilityCallback(std::shared_ptr abilityCallback) +{ + MEDIA_ERR_LOG("CaptureSession::SetAbilityCallback() set ability callback"); + std::lock_guard 
lock(sessionCallbackMutex_); + abilityCallback_ = abilityCallback; + return; +} + +std::shared_ptr CaptureSession::GetMetadata() +{ + return inputDevice_->GetCameraDeviceInfo()->GetMetadata(); +} +} // CameraStandard +} // OHOS \ No newline at end of file diff --git a/frameworks/native/camera/src/session/portrait_session.cpp b/frameworks/native/camera/src/session/portrait_session.cpp index 9232ae796..e4c9335e7 100644 --- a/frameworks/native/camera/src/session/portrait_session.cpp +++ b/frameworks/native/camera/src/session/portrait_session.cpp @@ -218,12 +218,6 @@ float PortraitSession::GetVirtualAperture() return virtualAperture; } -bool PortraitSession::FloatIsEqual(float x, float y) -{ - const float EPSILON = 0.000001; - return std::fabs(x - y) < EPSILON; -} - void PortraitSession::SetVirtualAperture(const float virtualAperture) { CAMERA_SYNC_TRACE; @@ -237,7 +231,7 @@ void PortraitSession::SetVirtualAperture(const float virtualAperture) } std::vector supportedVirtualApertures = GetSupportedVirtualApertures(); auto res = std::find_if(supportedVirtualApertures.begin(), supportedVirtualApertures.end(), - [&virtualAperture, this](const float item) {return FloatIsEqual(virtualAperture, item);}); + [&virtualAperture](const float item) {return FloatIsEqual(virtualAperture, item);}); if (res == supportedVirtualApertures.end()) { MEDIA_ERR_LOG("current virtualAperture is not supported"); return; @@ -351,7 +345,7 @@ void PortraitSession::SetPhysicalAperture(const float physicalAperture) } int physicalAperturesIndex = 2; auto res = std::find_if(std::next((*it).begin(), physicalAperturesIndex), (*it).end(), - [&physicalAperture, this](const float physicalApertureTemp) { + [&physicalAperture](const float physicalApertureTemp) { return FloatIsEqual(physicalAperture, physicalApertureTemp); }); if (res == (*it).end()) { diff --git a/frameworks/native/camera/src/session/profession_session.cpp b/frameworks/native/camera/src/session/profession_session.cpp new file mode 100644 
index 000000000..8a6021604 --- /dev/null +++ b/frameworks/native/camera/src/session/profession_session.cpp @@ -0,0 +1,1558 @@ +/* + * Copyright (c) 2021-2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "session/profession_session.h" +#include "camera_metadata_operator.h" +#include "camera_util.h" +#include "hcapture_session_callback_stub.h" +#include "input/camera_input.h" +#include "camera_log.h" +#include "output/photo_output.h" +#include "output/preview_output.h" +#include "output/video_output.h" +#include +#include + +namespace OHOS { +namespace CameraStandard { +ProfessionSession::~ProfessionSession() +{ + exposureInfoCallback_ = nullptr; + isoInfoCallback_ = nullptr; + apertureInfoCallback_ = nullptr; + luminationInfoCallback_ = nullptr; +} +// metering mode +const std::unordered_map ProfessionSession::metaMeteringModeMap_ = { + {OHOS_CAMERA_SPOT_METERING, METERING_MODE_SPOT}, + {OHOS_CAMERA_REGION_METERING, METERING_MODE_REGION}, + {OHOS_CAMERA_OVERALL_METERING, METERING_MODE_OVERALL}, + {OHOS_CAMERA_CENTER_WEIGHTED_METERING, METERING_MODE_CENTER_WEIGHTED} +}; + +const std::unordered_map ProfessionSession::fwkMeteringModeMap_ = { + {METERING_MODE_SPOT, OHOS_CAMERA_SPOT_METERING}, + {METERING_MODE_REGION, OHOS_CAMERA_REGION_METERING}, + {METERING_MODE_OVERALL, OHOS_CAMERA_OVERALL_METERING}, + {METERING_MODE_CENTER_WEIGHTED, OHOS_CAMERA_CENTER_WEIGHTED_METERING} +}; + +// FocusAssistFlash mode +const 
std::unordered_map + ProfessionSession::metaFocusAssistFlashModeMap_ = { + { OHOS_CAMERA_FOCUS_ASSIST_FLASH_MODE_DEFAULT, FOCUS_ASSIST_FLASH_MODE_DEFAULT }, + { OHOS_CAMERA_FOCUS_ASSIST_FLASH_MODE_AUTO, FOCUS_ASSIST_FLASH_MODE_AUTO }, + { OHOS_CAMERA_FOCUS_ASSIST_FLASH_MODE_ON, FOCUS_ASSIST_FLASH_MODE_ON }, + { OHOS_CAMERA_FOCUS_ASSIST_FLASH_MODE_OFF, FOCUS_ASSIST_FLASH_MODE_OFF }, +}; +const std::unordered_map + ProfessionSession::fwkFocusAssistFlashModeMap_ = { + { FOCUS_ASSIST_FLASH_MODE_DEFAULT, OHOS_CAMERA_FOCUS_ASSIST_FLASH_MODE_DEFAULT }, + { FOCUS_ASSIST_FLASH_MODE_AUTO, OHOS_CAMERA_FOCUS_ASSIST_FLASH_MODE_AUTO }, + { FOCUS_ASSIST_FLASH_MODE_ON, OHOS_CAMERA_FOCUS_ASSIST_FLASH_MODE_ON }, + { FOCUS_ASSIST_FLASH_MODE_OFF, OHOS_CAMERA_FOCUS_ASSIST_FLASH_MODE_OFF }, +}; +// WhiteBalanceMode +const std::unordered_map ProfessionSession::metaWhiteBalanceModeMap_ = { + { OHOS_CAMERA_AWB_MODE_OFF, AWB_MODE_OFF }, + { OHOS_CAMERA_AWB_MODE_AUTO, AWB_MODE_AUTO }, + { OHOS_CAMERA_AWB_MODE_INCANDESCENT, AWB_MODE_INCANDESCENT }, + { OHOS_CAMERA_AWB_MODE_FLUORESCENT, AWB_MODE_FLUORESCENT }, + { OHOS_CAMERA_AWB_MODE_WARM_FLUORESCENT, AWB_MODE_WARM_FLUORESCENT }, + { OHOS_CAMERA_AWB_MODE_DAYLIGHT, AWB_MODE_DAYLIGHT }, + { OHOS_CAMERA_AWB_MODE_CLOUDY_DAYLIGHT, AWB_MODE_CLOUDY_DAYLIGHT }, + { OHOS_CAMERA_AWB_MODE_TWILIGHT, AWB_MODE_TWILIGHT }, + { OHOS_CAMERA_AWB_MODE_SHADE, AWB_MODE_SHADE }, +}; +const std::unordered_map ProfessionSession::fwkWhiteBalanceModeMap_ = { + { AWB_MODE_OFF, OHOS_CAMERA_AWB_MODE_OFF }, + { AWB_MODE_AUTO, OHOS_CAMERA_AWB_MODE_AUTO }, + { AWB_MODE_INCANDESCENT, OHOS_CAMERA_AWB_MODE_INCANDESCENT }, + { AWB_MODE_FLUORESCENT, OHOS_CAMERA_AWB_MODE_FLUORESCENT }, + { AWB_MODE_WARM_FLUORESCENT, OHOS_CAMERA_AWB_MODE_WARM_FLUORESCENT }, + { AWB_MODE_DAYLIGHT, OHOS_CAMERA_AWB_MODE_DAYLIGHT }, + { AWB_MODE_CLOUDY_DAYLIGHT, OHOS_CAMERA_AWB_MODE_CLOUDY_DAYLIGHT }, + { AWB_MODE_TWILIGHT, OHOS_CAMERA_AWB_MODE_TWILIGHT }, + { AWB_MODE_SHADE, 
OHOS_CAMERA_AWB_MODE_SHADE }, +}; +// ExposureHintMode +const std::unordered_map + ProfessionSession::metaExposureHintModeMap_ = { + { OHOS_CAMERA_EXPOSURE_HINT_UNSUPPORTED, EXPOSURE_HINT_UNSUPPORTED }, + { OHOS_CAMERA_EXPOSURE_HINT_MODE_ON, EXPOSURE_HINT_MODE_ON }, + { OHOS_CAMERA_EXPOSURE_HINT_MODE_OFF, EXPOSURE_HINT_MODE_OFF }, +}; +const std::unordered_map + ProfessionSession::fwkExposureHintModeMap_ = { + { EXPOSURE_HINT_UNSUPPORTED, OHOS_CAMERA_EXPOSURE_HINT_UNSUPPORTED }, + { EXPOSURE_HINT_MODE_ON, OHOS_CAMERA_EXPOSURE_HINT_MODE_ON }, + { EXPOSURE_HINT_MODE_OFF, OHOS_CAMERA_EXPOSURE_HINT_MODE_OFF }, +}; +// metering mode +int32_t ProfessionSession::GetSupportedMeteringModes(std::vector &supportedMeteringModes) +{ + supportedMeteringModes.clear(); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedMeteringModes Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedMeteringModes camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_METER_MODES, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedMeteringModes Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + for (uint32_t i = 0; i < item.count; i++) { + auto itr = metaMeteringModeMap_.find(static_cast(item.data.u8[i])); + if (itr != metaMeteringModeMap_.end()) { + supportedMeteringModes.emplace_back(itr->second); + } + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::IsMeteringModeSupported(MeteringMode meteringMode, bool &isSupported) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::IsMeteringModeSupported Session is not Commited"); + return 
CameraErrorCode::SESSION_NOT_CONFIG; + } + std::vector vecSupportedMeteringModeList; + (void)this->GetSupportedMeteringModes(vecSupportedMeteringModeList); + if (find(vecSupportedMeteringModeList.begin(), vecSupportedMeteringModeList.end(), + meteringMode) != vecSupportedMeteringModeList.end()) { + isSupported = true; + return CameraErrorCode::SUCCESS; + } + isSupported = false; + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetMeteringMode(MeteringMode mode) +{ + CAMERA_SYNC_TRACE; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::SetMeteringMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetMeteringMode Need to call LockForControl() " + "before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + camera_meter_mode_t meteringMode = OHOS_CAMERA_SPOT_METERING; + auto itr = fwkMeteringModeMap_.find(mode); + if (itr == fwkMeteringModeMap_.end()) { + MEDIA_ERR_LOG("ProfessionSession::SetMeteringMode Unknown exposure mode"); + } else { + meteringMode = itr->second; + } + bool status = false; + int32_t ret; + uint32_t count = 1; + camera_metadata_item_t item; + + MEDIA_DEBUG_LOG("ProfessionSession::SetMeteringMode metering mode: %{public}d", meteringMode); + + ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_METER_MODE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_METER_MODE, &meteringMode, count); + } else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_METER_MODE, &meteringMode, count); + } + + if (!status) { + MEDIA_ERR_LOG("ProfessionSession::SetMeteringMode Failed to set focus mode"); + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetMeteringMode(MeteringMode &meteringMode) +{ + meteringMode = METERING_MODE_SPOT; + if (!IsSessionCommited()) { + 
MEDIA_ERR_LOG("ProfessionSession::GetMeteringMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetMeteringMode camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_METER_MODE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetMeteringMode Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + auto itr = metaMeteringModeMap_.find(static_cast(item.data.u8[0])); + if (itr != metaMeteringModeMap_.end()) { + meteringMode = itr->second; + return CameraErrorCode::SUCCESS; + } + return CameraErrorCode::SUCCESS; +} +// ISO +int32_t ProfessionSession::GetIsoRange(std::vector &isoRange) +{ + isoRange.clear(); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetIsoRange Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetIsoRange camera device is null"); + return CameraErrorCode::INVALID_ARGUMENT; + } + std::shared_ptr metadata = GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_ISO_VALUES, &item); + if (ret != CAM_META_SUCCESS || item.count == 0) { + MEDIA_ERR_LOG("ProfessionSession::GetIsoRange Failed with return code %{public}d", ret); + return CameraErrorCode::INVALID_ARGUMENT; + } + std::vector > modeIsoRanges = {}; + std::vector modeRange = {}; + for (uint32_t i = 0; i < item.count; i++) { + if (item.data.i32[i] != -1) { + modeRange.emplace_back(item.data.i32[i]); + continue; + } + MEDIA_DEBUG_LOG("ProfessionSession::GetIsoRange mode %{public}d, range=%{public}s", + GetMode(), 
Container2String(modeRange.begin(), modeRange.end()).c_str()); + modeIsoRanges.emplace_back(std::move(modeRange)); + modeRange.clear(); + } + + for (auto it : modeIsoRanges) { + MEDIA_DEBUG_LOG("ProfessionSession::GetIsoRange ranges=%{public}s", + Container2String(it.begin(), it.end()).c_str()); + if (GetMode() == it.at(0)) { + isoRange.resize(it.size() - 1); + std::copy(it.begin() + 1, it.end(), isoRange.begin()); + } + } + MEDIA_INFO_LOG("ProfessionSessionNapi::GetIsoRange isoRange=%{public}s, len = %{public}zu", + Container2String(isoRange.begin(), isoRange.end()).c_str(), isoRange.size()); + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetISO(int32_t iso) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::SetISO Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetISO Need to call LockForControl() " + "before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + bool status = false; + int32_t count = 1; + camera_metadata_item_t item; + MEDIA_DEBUG_LOG("ProfessionSession::SetISO iso value: %{public}d", iso); + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::SetISO camera device is null"); + return CameraErrorCode::OPERATION_NOT_ALLOWED; + } + + std::vector isoRange; + if ((GetIsoRange(isoRange) != CameraErrorCode::SUCCESS) && isoRange.empty()) { + MEDIA_ERR_LOG("ProfessionSession::SetISO range is empty"); + return CameraErrorCode::OPERATION_NOT_ALLOWED; + } + const int32_t autoIsoValue = 0; + if (iso != autoIsoValue && std::find(isoRange.begin(), isoRange.end(), iso) == isoRange.end()) { + return CameraErrorCode::INVALID_ARGUMENT; + } + int ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_ISO_VALUE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_ISO_VALUE, &iso, count); + } else if 
(ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_ISO_VALUE, &iso, count); + } + if (!status) { + MEDIA_ERR_LOG("ProfessionSession::SetISO Failed to set exposure compensation"); + } + isoValue_ = iso; + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetISO(int32_t &iso) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetISO Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetISO camera device is null"); + return CameraErrorCode::INVALID_ARGUMENT; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_ISO_VALUE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetISO Failed with return code %{public}d", ret); + return CameraErrorCode::INVALID_ARGUMENT; + } + iso = item.data.i32[0]; + MEDIA_DEBUG_LOG("iso: %{public}d", iso); + return CameraErrorCode::SUCCESS; +} + +bool ProfessionSession::IsManualIsoSupported() +{ + CAMERA_SYNC_TRACE; + MEDIA_DEBUG_LOG("Enter IsManualIsoSupported"); + + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::IsManualIsoSupported Session is not Commited"); + return false; + } + if (inputDevice_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::IsManualIsoSupported camera device is null"); + return false; + } + auto deviceInfo = inputDevice_->GetCameraDeviceInfo(); + if (deviceInfo == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::IsManualIsoSupported camera deviceInfo is null"); + return false; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_ISO_VALUES, &item); + if (ret != CAM_META_SUCCESS || item.count == 0) { + 
MEDIA_ERR_LOG("ProfessionSession::IsMacroSupported Failed with return code %{public}d", ret); + return false; + } + return true; +} + +// SensorExposureTime +int32_t ProfessionSession::GetSensorExposureTimeRange(std::vector &sensorExposureTimeRange) +{ + sensorExposureTimeRange.clear(); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetSensorExposureTimeRange Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetSensorExposureTimeRange camera device is null"); + return CameraErrorCode::INVALID_ARGUMENT; + } + std::shared_ptr metadata = GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_SENSOR_EXPOSURE_TIME_RANGE, &item); + if (ret != CAM_META_SUCCESS || item.count == 0) { + MEDIA_ERR_LOG("ProfessionSession::GetSensorExposureTimeRange Failed with return code %{public}d", ret); + return CameraErrorCode::INVALID_ARGUMENT; + } + + int32_t numerator = 0; + int32_t denominator = 0; + uint32_t value = 0; + constexpr int32_t timeUnit = 1000000; + for (uint32_t i = 0; i < item.count; i++) { + numerator = item.data.r[i].numerator; + denominator = item.data.r[i].denominator; + value = numerator / (denominator / timeUnit); + MEDIA_DEBUG_LOG("ProfessionSession::GetSensorExposureTimeRange numerator=%{public}d, denominator=%{public}d," + " value=%{public}d", numerator, denominator, value); + if (denominator == 0) { + MEDIA_ERR_LOG("ProfessionSession::GetSensorExposureTimeRange divide by 0! 
numerator=%{public}d", numerator); + return CameraErrorCode::INVALID_ARGUMENT; + } + sensorExposureTimeRange.emplace_back(value); + } + MEDIA_INFO_LOG("ProfessionSessionNapi::GetSensorExposureTimeRange range=%{public}s, len = %{public}zu", + Container2String(sensorExposureTimeRange.begin(), sensorExposureTimeRange.end()).c_str(), + sensorExposureTimeRange.size()); + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetSensorExposureTime(uint32_t exposureTime) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::SetSensorExposureTime Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetSensorExposureTime Need to call LockForControl() " + "before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + bool status = false; + int32_t count = 1; + camera_metadata_item_t item; + MEDIA_DEBUG_LOG("ProfessionSession::SetSensorExposureTime exposure: %{public}d", exposureTime); + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::SetSensorExposureTime camera device is null"); + return CameraErrorCode::OPERATION_NOT_ALLOWED; + } + std::vector sensorExposureTimeRange; + if ((GetSensorExposureTimeRange(sensorExposureTimeRange) != CameraErrorCode::SUCCESS) && + sensorExposureTimeRange.empty()) { + MEDIA_ERR_LOG("ProfessionSession::SetSensorExposureTime range is empty"); + return CameraErrorCode::OPERATION_NOT_ALLOWED; + } + const uint32_t autoLongExposure = 0; + int32_t minIndex = 0; + int32_t maxIndex = 1; + if (exposureTime != autoLongExposure && exposureTime < sensorExposureTimeRange[minIndex]) { + MEDIA_DEBUG_LOG("ProfessionSession::SetSensorExposureTime exposureTime:" + "%{public}d is lesser than minimum exposureTime: %{public}d", + exposureTime, sensorExposureTimeRange[minIndex]); + exposureTime = sensorExposureTimeRange[minIndex]; + } else if (exposureTime > 
sensorExposureTimeRange[maxIndex]) { + MEDIA_DEBUG_LOG("ProfessionSession::SetSensorExposureTime exposureTime: " + "%{public}d is greater than maximum exposureTime: %{public}d", + exposureTime, sensorExposureTimeRange[maxIndex]); + exposureTime = sensorExposureTimeRange[maxIndex]; + } + constexpr int32_t timeUnit = 1000000; + camera_rational_t value = {.numerator = exposureTime, .denominator = timeUnit}; + int ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_SENSOR_EXPOSURE_TIME, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_SENSOR_EXPOSURE_TIME, &value, count); + } else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_SENSOR_EXPOSURE_TIME, &value, count); + } + if (!status) { + MEDIA_ERR_LOG("ProfessionSession::SetSensorExposureTime Failed to set exposure compensation"); + } + exposureDurationValue_ = exposureTime; + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetSensorExposureTime(uint32_t &exposureTime) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetSensorExposureTime Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetSensorExposureTime camera device is null"); + return CameraErrorCode::INVALID_ARGUMENT; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_SENSOR_EXPOSURE_TIME, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetSensorExposureTime Failed with return code %{public}d", ret); + return CameraErrorCode::INVALID_ARGUMENT; + } + exposureTime = item.data.ui32[0]; + MEDIA_DEBUG_LOG("ProfessionSession::GetSensorExposureTime exposureTime: %{public}d", exposureTime); + return CameraErrorCode::SUCCESS; +} + +// 
focus mode +int32_t ProfessionSession::GetSupportedFocusModes(std::vector &supportedFocusModes) +{ + supportedFocusModes.clear(); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedFocusModes Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedFocusModes camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_FOCUS_MODES, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedFocusModes Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + for (uint32_t i = 0; i < item.count; i++) { + auto itr = metaFocusModeMap_.find(static_cast(item.data.u8[i])); + if (itr != metaFocusModeMap_.end()) { + supportedFocusModes.emplace_back(itr->second); + } + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::IsFocusModeSupported(FocusMode focusMode, bool &isSupported) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::IsFocusModeSupported Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + std::vector vecSupportedMeteringModeList; + (void)(this->GetSupportedFocusModes(vecSupportedMeteringModeList)); + if (find(vecSupportedMeteringModeList.begin(), vecSupportedMeteringModeList.end(), + focusMode) != vecSupportedMeteringModeList.end()) { + isSupported = true; + return CameraErrorCode::SUCCESS; + } + isSupported = false; + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetFocusMode(FocusMode focusMode) +{ + CAMERA_SYNC_TRACE; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::SetFocusMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if 
(changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetFocusMode Need to call LockForControl() before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + uint8_t focus = FOCUS_MODE_LOCKED; + auto itr = fwkFocusModeMap_.find(focusMode); + if (itr == fwkFocusModeMap_.end()) { + MEDIA_ERR_LOG("ProfessionSession::SetFocusMode Unknown exposure mode"); + } else { + focus = itr->second; + } + bool status = false; + int32_t ret; + uint32_t count = 1; + camera_metadata_item_t item; + + MEDIA_DEBUG_LOG("ProfessionSession::SetFocusMode Focus mode: %{public}d", focusMode); + + ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_FOCUS_MODE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_FOCUS_MODE, &focus, count); + } else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_FOCUS_MODE, &focus, count); + } + + if (!status) { + MEDIA_ERR_LOG("ProfessionSession::SetFocusMode Failed to set focus mode"); + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetFocusMode(FocusMode &focusMode) +{ + focusMode = FOCUS_MODE_MANUAL; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetFocusMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetFocusMode camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_FOCUS_MODE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetFocusMode Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + auto itr = metaFocusModeMap_.find(static_cast(item.data.u8[0])); + if (itr != metaFocusModeMap_.end()) { + 
focusMode = itr->second; + return CameraErrorCode::SUCCESS; + } + return CameraErrorCode::SUCCESS; +} + +// white balance mode +int32_t ProfessionSession::GetSupportedWhiteBalanceModes(std::vector &supportedWhiteBalanceModes) +{ + supportedWhiteBalanceModes.clear(); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedWhiteBalanceModes Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedWhiteBalanceModes camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_AWB_MODES, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedWhiteBalanceModes Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + for (uint32_t i = 0; i < item.count; i++) { + auto itr = metaWhiteBalanceModeMap_.find(static_cast(item.data.u8[i])); + if (itr != metaWhiteBalanceModeMap_.end()) { + supportedWhiteBalanceModes.emplace_back(itr->second); + } + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::IsWhiteBalanceModeSupported(WhiteBalanceMode mode, bool &isSupported) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::IsFocusModeSupported Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + std::vector vecSupportedWhiteBalanceModeList; + (void)this->GetSupportedWhiteBalanceModes(vecSupportedWhiteBalanceModeList); + if (find(vecSupportedWhiteBalanceModeList.begin(), vecSupportedWhiteBalanceModeList.end(), + mode) != vecSupportedWhiteBalanceModeList.end()) { + isSupported = true; + return CameraErrorCode::SUCCESS; + } + isSupported = false; + return CameraErrorCode::SUCCESS; +} + +int32_t 
ProfessionSession::SetWhiteBalanceMode(WhiteBalanceMode mode) +{ + CAMERA_SYNC_TRACE; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::SetWhiteBalanceMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetWhiteBalanceMode Need to call LockForControl() " + "before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + camera_awb_mode_t whiteBalanceMode = OHOS_CAMERA_AWB_MODE_OFF; + auto itr = fwkWhiteBalanceModeMap_.find(mode); + if (itr == fwkWhiteBalanceModeMap_.end()) { + MEDIA_ERR_LOG("ProfessionSession::SetWhiteBalanceMode Unknown exposure mode"); + } else { + whiteBalanceMode = itr->second; + } + bool status = false; + int32_t ret; + uint32_t count = 1; + camera_metadata_item_t item; + MEDIA_DEBUG_LOG("ProfessionSession::SetWhiteBalanceMode WhiteBalance mode: %{public}d", whiteBalanceMode); + ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_AWB_MODE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_AWB_MODE, &whiteBalanceMode, count); + } else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_AWB_MODE, &whiteBalanceMode, count); + } + // no manual wb mode need set maunual value to 0 + if (mode != AWB_MODE_OFF) { + SetManualWhiteBalance(0); + } + if (!status) { + MEDIA_ERR_LOG("ProfessionSession::SetWhiteBalanceMode Failed to set WhiteBalance mode"); + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetWhiteBalanceMode(WhiteBalanceMode &mode) +{ + mode = AWB_MODE_OFF; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetWhiteBalanceMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetWhiteBalanceMode camera device is null"); + return 
CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_AWB_MODE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetWhiteBalanceMode Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + auto itr = metaWhiteBalanceModeMap_.find(static_cast(item.data.u8[0])); + if (itr != metaWhiteBalanceModeMap_.end()) { + mode = itr->second; + return CameraErrorCode::SUCCESS; + } + return CameraErrorCode::SUCCESS; +} + +// manual white balance +int32_t ProfessionSession::GetManualWhiteBalanceRange(std::vector &whiteBalanceRange) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetManualWhiteBalanceRange Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetManualWhiteBalanceRange camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_SENSOR_WB_VALUES, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetManualWhiteBalanceRange Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + + for (uint32_t i = 0; i < item.count; i++) { + whiteBalanceRange.emplace_back(item.data.i32[i]); + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::IsManualWhiteBalanceSupported(bool &isSupported) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::IsManualWhiteBalanceSupported Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + std::vector whiteBalanceRange; + this->GetManualWhiteBalanceRange(whiteBalanceRange); + constexpr int32_t 
rangeSize = 2; + isSupported = (whiteBalanceRange.size() == rangeSize); + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetManualWhiteBalance(int32_t wbValue) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::SetManualWhiteBalance Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetManualWhiteBalance Need to call LockForControl() " + "before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + WhiteBalanceMode mode; + GetWhiteBalanceMode(mode); + //WhiteBalanceMode::OFF + if (mode != WhiteBalanceMode::AWB_MODE_OFF) { + MEDIA_ERR_LOG("ProfessionSession::SetManualWhiteBalance Need to set WhiteBalanceMode off"); + return CameraErrorCode::OPERATION_NOT_ALLOWED; + } + bool status = false; + int32_t minIndex = 0; + int32_t maxIndex = 1; + int32_t count = 1; + camera_metadata_item_t item; + MEDIA_DEBUG_LOG("ProfessionSession::SetManualWhiteBalance white balance: %{public}d", wbValue); + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::SetManualWhiteBalance camera device is null"); + return CameraErrorCode::OPERATION_NOT_ALLOWED; + } + std::vector whiteBalanceRange; + this->GetManualWhiteBalanceRange(whiteBalanceRange); + if (whiteBalanceRange.empty()) { + MEDIA_ERR_LOG("ProfessionSession::SetManualWhiteBalance Bias range is empty"); + return CameraErrorCode::OPERATION_NOT_ALLOWED; + } + + if (wbValue != 0 && wbValue < whiteBalanceRange[minIndex]) { + MEDIA_DEBUG_LOG("ProfessionSession::SetManualWhiteBalance wbValue:" + "%{public}d is lesser than minimum wbValue: %{public}d", wbValue, whiteBalanceRange[minIndex]); + wbValue = whiteBalanceRange[minIndex]; + } else if (wbValue > whiteBalanceRange[maxIndex]) { + MEDIA_DEBUG_LOG("ProfessionSession::SetManualWhiteBalance wbValue: " + "%{public}d is greater than maximum wbValue: %{public}d", wbValue, 
whiteBalanceRange[maxIndex]); + wbValue = whiteBalanceRange[maxIndex]; + } + + int32_t ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_SENSOR_WB_VALUE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_SENSOR_WB_VALUE, &wbValue, count); + } else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_SENSOR_WB_VALUE, &wbValue, count); + } + if (!status) { + MEDIA_ERR_LOG("SetManualWhiteBalance Failed to SetManualWhiteBalance"); + } + return CameraErrorCode::SUCCESS; +} + + +int32_t ProfessionSession::GetManualWhiteBalance(int32_t &wbValue) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetManualWhiteBalance Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetManualWhiteBalance camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_SENSOR_WB_VALUE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetManualWhiteBalance Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + if (item.count != 0) { + wbValue = item.data.i32[0]; + } + return CameraErrorCode::SUCCESS; +} + +// Exposure Hint +int32_t ProfessionSession::GetSupportedExposureHintModes(std::vector &supportedExposureHintModes) +{ + supportedExposureHintModes.clear(); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedExposureHintModes Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedExposureHintModes camera device is null"); + return 
CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_EXPOSURE_HINT_SUPPORTED, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedExposureHintModes Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + for (uint32_t i = 0; i < item.count; i++) { + auto itr = metaExposureHintModeMap_.find(static_cast(item.data.u8[i])); + if (itr != metaExposureHintModeMap_.end()) { + supportedExposureHintModes.emplace_back(itr->second); + } + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetExposureHintMode(ExposureHintMode mode) +{ + CAMERA_SYNC_TRACE; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::SetExposureHintMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetExposureHintMode Need to call LockForControl() " + "before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + uint8_t exposureHintMode = OHOS_CAMERA_EXPOSURE_HINT_UNSUPPORTED; + auto itr = fwkExposureHintModeMap_.find(mode); + if (itr == fwkExposureHintModeMap_.end()) { + MEDIA_ERR_LOG("ProfessionSession::SetExposureHintMode Unknown mode"); + } else { + exposureHintMode = itr->second; + } + bool status = false; + int32_t ret; + uint32_t count = 1; + camera_metadata_item_t item; + MEDIA_DEBUG_LOG("ProfessionSession::SetExposureHintMode ExposureHint mode: %{public}d", exposureHintMode); + + ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_EXPOSURE_HINT_MODE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_EXPOSURE_HINT_MODE, &exposureHintMode, count); + } else if (ret == CAM_META_SUCCESS) { + status = 
changedMetadata_->updateEntry(OHOS_CONTROL_EXPOSURE_HINT_MODE, &exposureHintMode, count); + } + if (!status) { + MEDIA_ERR_LOG("ProfessionSession::SetExposureHintMode Failed to set ExposureHint mode"); + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetExposureHintMode(ExposureHintMode &mode) +{ + mode = EXPOSURE_HINT_UNSUPPORTED; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetExposureHintMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetExposureHintMode camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_EXPOSURE_HINT_MODE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetExposureHintMode Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + auto itr = metaExposureHintModeMap_.find(static_cast(item.data.u8[0])); + if (itr != metaExposureHintModeMap_.end()) { + mode = itr->second; + return CameraErrorCode::SUCCESS; + } + return CameraErrorCode::SUCCESS; +} +// Focus Flash Assist +int32_t ProfessionSession::GetSupportedFocusAssistFlashModes( + std::vector &supportedFocusAssistFlashModes) +{ + supportedFocusAssistFlashModes.clear(); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedFocusAssistFlashModes Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedFocusAssistFlashModes camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = 
Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_FOCUS_ASSIST_FLASH_SUPPORTED_MODES, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedFocusAssistFlashModes Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + for (uint32_t i = 0; i < item.count; i++) { + auto itr = metaFocusAssistFlashModeMap_.find( + static_cast(item.data.u8[i])); + if (itr != metaFocusAssistFlashModeMap_.end()) { + supportedFocusAssistFlashModes.emplace_back(itr->second); + } + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::IsFocusAssistFlashModeSupported(FocusAssistFlashMode mode, bool &isSupported) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::IsFocusModeSupported Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + std::vector vecSupportedFocusAssistFlashModeList; + (void)this->GetSupportedFocusAssistFlashModes(vecSupportedFocusAssistFlashModeList); + if (find(vecSupportedFocusAssistFlashModeList.begin(), vecSupportedFocusAssistFlashModeList.end(), + mode) != vecSupportedFocusAssistFlashModeList.end()) { + isSupported = true; + return CameraErrorCode::SUCCESS; + } + isSupported = false; + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetFocusAssistFlashMode(FocusAssistFlashMode mode) +{ + CAMERA_SYNC_TRACE; + MEDIA_DEBUG_LOG("ProfessionSession::SetFocusAssistFlashMode app mode: %{public}d", static_cast(mode)); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::SetFocusAssistFlashMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetFocusAssistFlashMode Need to call LockForControl " + "before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + uint8_t value = OHOS_CAMERA_FOCUS_ASSIST_FLASH_MODE_DEFAULT; + auto itr = fwkFocusAssistFlashModeMap_.find(mode); + if (itr == 
fwkFocusAssistFlashModeMap_.end()) { + MEDIA_ERR_LOG("ProfessionSession::SetFocusAssistFlashMode Unknown exposure mode"); + } else { + value = itr->second; + } + bool status = false; + int32_t ret; + uint32_t count = 1; + camera_metadata_item_t item; + MEDIA_DEBUG_LOG("ProfessionSession::SetFocusAssistFlashMode FocusAssistFlash mode: %{public}d", value); + ret = Camera::FindCameraMetadataItem( + changedMetadata_->get(), OHOS_CONTROL_FOCUS_ASSIST_FLASH_SUPPORTED_MODE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_FOCUS_ASSIST_FLASH_SUPPORTED_MODE, &value, count); + } else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_FOCUS_ASSIST_FLASH_SUPPORTED_MODE, &value, count); + } + if (!status) { + MEDIA_ERR_LOG("ProfessionSession::SetFocusAssistFlashMode Failed to set FocusAssistFlash mode"); + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetFocusAssistFlashMode(FocusAssistFlashMode &mode) +{ + mode = FOCUS_ASSIST_FLASH_MODE_DEFAULT; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetFocusAssistFlashMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetFocusAssistFlashMode camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_FOCUS_ASSIST_FLASH_SUPPORTED_MODE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetFocusAssistFlashMode Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + auto itr = metaFocusAssistFlashModeMap_.find(static_cast(item.data.u8[0])); + if (itr != metaFocusAssistFlashModeMap_.end()) { + mode = itr->second; + return CameraErrorCode::SUCCESS; 
+ } + return CameraErrorCode::SUCCESS; +} + +// flash mode +int32_t ProfessionSession::GetSupportedFlashModes(std::vector &supportedFlashModes) +{ + supportedFlashModes.clear(); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedFlashModes Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedFlashModes camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_FLASH_MODES, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedFlashModes Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + for (uint32_t i = 0; i < item.count; i++) { + auto itr = metaFlashModeMap_.find(static_cast(item.data.u8[i])); + if (itr != metaFlashModeMap_.end()) { + supportedFlashModes.emplace_back(itr->second); + } + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetFlashMode(FlashMode &flashMode) +{ + flashMode = FLASH_MODE_CLOSE; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::GetFlashMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetFlashMode camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_FLASH_MODE, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetFlashMode Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + auto itr = 
metaFlashModeMap_.find(static_cast(item.data.u8[0])); + if (itr != metaFlashModeMap_.end()) { + flashMode = itr->second; + return CameraErrorCode::SUCCESS; + } + + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetFlashMode(FlashMode flashMode) +{ + CAMERA_SYNC_TRACE; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::SetFlashMode Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetFlashMode Need to call LockForControl() before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + uint8_t flash = fwkFlashModeMap_.at(FLASH_MODE_CLOSE); + auto itr = fwkFlashModeMap_.find(flashMode); + if (itr == fwkFlashModeMap_.end()) { + MEDIA_ERR_LOG("ProfessionSession::SetExposureMode Unknown exposure mode"); + } else { + flash = itr->second; + } + + bool status = false; + uint32_t count = 1; + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_FLASH_MODE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_FLASH_MODE, &flash, count); + } else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_FLASH_MODE, &flash, count); + } + + if (!status) { + MEDIA_ERR_LOG("ProfessionSession::SetFlashMode Failed to set flash mode"); + return CameraErrorCode::SUCCESS; + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::IsFlashModeSupported(FlashMode flashMode, bool &isSupported) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::IsFlashModeSupported Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + std::vector vecSupportedFlashModeList; + (void)this->GetSupportedFlashModes(vecSupportedFlashModeList); + if (find(vecSupportedFlashModeList.begin(), vecSupportedFlashModeList.end(), flashMode) != + vecSupportedFlashModeList.end()) { + isSupported = 
true; + return CameraErrorCode::SUCCESS; + } + isSupported = false; + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::HasFlash(bool &hasFlash) +{ + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("ProfessionSession::HasFlash Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + std::vector vecSupportedFlashModeList; + (void)this->GetSupportedFlashModes(vecSupportedFlashModeList); + if (vecSupportedFlashModeList.empty()) { + hasFlash = false; + return CameraErrorCode::SUCCESS; + } + hasFlash = true; + return CameraErrorCode::SUCCESS; +} +// XMAGE + +int32_t ProfessionSession::GetSupportedColorEffects(std::vector& supportedColorEffects) +{ + supportedColorEffects.clear(); + if (!(IsSessionCommited() || IsSessionConfiged())) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedColorEffects Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedColorEffects camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_SUPPORTED_COLOR_MODES, &item); + if (ret != CAM_META_SUCCESS) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedColorEffects Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + for (uint32_t i = 0; i < item.count; i++) { + auto itr = metaColorEffectMap_.find(static_cast(item.data.u8[i])); + if (itr != metaColorEffectMap_.end()) { + supportedColorEffects.emplace_back(itr->second); + } + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetColorEffect(ColorEffect& colorEffect) +{ + colorEffect = ColorEffect::COLOR_EFFECT_NORMAL; + if (!(IsSessionCommited() || IsSessionConfiged())) { + MEDIA_ERR_LOG("ProfessionSession::GetColorEffect Session is not 
Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("ProfessionSession::GetColorEffect camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_SUPPORTED_COLOR_MODES, &item); + if (ret != CAM_META_SUCCESS || item.count == 0) { + MEDIA_ERR_LOG("ProfessionSession::GetColorEffect Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + auto itr = metaColorEffectMap_.find(static_cast(item.data.u8[0])); + if (itr != metaColorEffectMap_.end()) { + colorEffect = itr->second; + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetColorEffect(ColorEffect colorEffect) +{ + CAMERA_SYNC_TRACE; + if (!(IsSessionCommited() || IsSessionConfiged())) { + MEDIA_ERR_LOG("ProfessionSession::SetColorEffect Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::SetColorEffect Need to call LockForControl before setting camera properties"); + return CameraErrorCode::SUCCESS; + } + uint8_t colorEffectTemp = ColorEffect::COLOR_EFFECT_NORMAL; + auto itr = fwkColorEffectMap_.find(colorEffect); + if (itr == fwkColorEffectMap_.end()) { + MEDIA_ERR_LOG("ProfessionSession::SetColorEffect unknown is color effect"); + } else { + colorEffectTemp = itr->second; + } + + bool status = false; + int32_t ret; + uint32_t count = 1; + camera_metadata_item_t item; + MEDIA_DEBUG_LOG("ProfessionSession::SetColorEffect: %{public}d", colorEffect); + ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_SUPPORTED_COLOR_MODES, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_SUPPORTED_COLOR_MODES, &colorEffectTemp, count); + } 
else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_SUPPORTED_COLOR_MODES, &colorEffectTemp, count); + } + + if (!status) { + MEDIA_ERR_LOG("ProfessionSession::SetColorEffect Failed to set color effect"); + } + return CameraErrorCode::SUCCESS; +} + +bool ProfessionSession::CanAddOutput(sptr &output, SceneMode modeName) +{ + CAMERA_SYNC_TRACE; + MEDIA_DEBUG_LOG("Enter Into ProfessionSession::CanAddOutput"); + if (!IsSessionConfiged() || output == nullptr) { + MEDIA_ERR_LOG("ProfessionSession::CanAddOutput operation is Not allowed!"); + return false; + } + return CaptureSession::CanAddOutput(output); +} + +// apertures +int32_t ProfessionSession::GetSupportedPhysicalApertures(std::vector>& supportedPhysicalApertures) +{ + // The data structure of the supportedPhysicalApertures object is { {zoomMin, zoomMax, + // physicalAperture1, physicalAperture2ยทยทยท}, }. + supportedPhysicalApertures.clear(); + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("GetSupportedPhysicalApertures Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("GetSupportedPhysicalApertures camera device is null"); + return CameraErrorCode::SUCCESS; + } + + std::shared_ptr metadata = GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_ABILITY_CAMERA_PHYSICAL_APERTURE_RANGE, &item); + if (ret != CAM_META_SUCCESS || item.count == 0) { + MEDIA_ERR_LOG("GetSupportedPhysicalApertures Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + std::vector allRange = {}; + for (uint32_t i = 0; i < item.count; i++) { + allRange.push_back(ConfusingNumber(item.data.f[i])); + } + MEDIA_DEBUG_LOG("ProfessionSession::GetSupportedPhysicalApertures mode %{public}d, allRange=%{public}s", + GetMode(), Container2String(allRange.begin(), allRange.end()).c_str()); + float npos = -1.0; + std::vector> 
modeRanges = {}; + + std::vector modeRange = {}; + + for (uint32_t i = 0; i < item.count - 1; i++) { + if (item.data.f[i] == npos && item.data.f[i + 1] == npos) { + modeRange.emplace_back(npos); + modeRanges.emplace_back(std::move(modeRange)); + modeRange.clear(); + i++; + continue; + } + modeRange.emplace_back(item.data.f[i]); + } + float currentMode = static_cast(GetMode()); + auto it = std::find_if(modeRanges.begin(), modeRanges.end(), + [currentMode](auto value) -> bool { + return currentMode == value[0]; + }); + if (it == modeRanges.end()) { + MEDIA_ERR_LOG("ProfessionSession::GetSupportedPhysicalApertures Failed meta not support mode:%{public}d", + GetMode()); + return CameraErrorCode::SUCCESS; + } + + auto chooseModeRange = *it; + int32_t deviceCntPos = 1; + int32_t supportedDeviceCount = static_cast(chooseModeRange[deviceCntPos]); + if (supportedDeviceCount == 0) { + MEDIA_ERR_LOG("GetSupportedPhysicalApertures Failed meta device count is 0"); + return CameraErrorCode::SUCCESS; + } + std::vector tempPhysicalApertures = {}; + for (uint32_t i = 2; i < chooseModeRange.size(); i++) { + if (chooseModeRange[i] == -1) { + supportedPhysicalApertures.emplace_back(tempPhysicalApertures); + vector().swap(tempPhysicalApertures); + continue; + } + tempPhysicalApertures.emplace_back(chooseModeRange[i]); + } + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::GetPhysicalAperture(float& physicalAperture) +{ + physicalAperture = 0.0; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("GetPhysicalAperture Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { + MEDIA_ERR_LOG("GetPhysicalAperture camera device is null"); + return CameraErrorCode::SUCCESS; + } + std::shared_ptr metadata = inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + camera_metadata_item_t item; + int ret = Camera::FindCameraMetadataItem(metadata->get(), OHOS_CONTROL_CAMERA_PHYSICAL_APERTURE_VALUE, &item); + 
if (ret != CAM_META_SUCCESS || item.count == 0) { + MEDIA_ERR_LOG("GetPhysicalAperture Failed with return code %{public}d", ret); + return CameraErrorCode::SUCCESS; + } + physicalAperture = item.data.f[0]; + return CameraErrorCode::SUCCESS; +} + +int32_t ProfessionSession::SetPhysicalAperture(float physicalAperture) +{ + CAMERA_SYNC_TRACE; + if (!IsSessionCommited()) { + MEDIA_ERR_LOG("SetPhysicalAperture Session is not Commited"); + return CameraErrorCode::SESSION_NOT_CONFIG; + } + if (changedMetadata_ == nullptr) { + MEDIA_ERR_LOG("SetPhysicalAperture changedMetadata_ is NULL"); + return CameraErrorCode::SUCCESS; + } + MEDIA_DEBUG_LOG("ProfessionSession::SetPhysicalAperture physicalAperture = %{public}f", + ConfusingNumber(physicalAperture)); + std::vector> physicalApertures; + GetSupportedPhysicalApertures(physicalApertures); + // physicalApertures size is one, means not support change + if (physicalApertures.size() == 1) { + MEDIA_ERR_LOG("SetPhysicalAperture not support"); + return CameraErrorCode::SUCCESS; + } + // accurately currentZoomRatio need smoothing zoom done + float currentZoomRatio = GetZoomRatio(); + int zoomMinIndex = 0; + int zoomMaxIndex = 1; + auto it = std::find_if(physicalApertures.begin(), physicalApertures.end(), + [¤tZoomRatio, &zoomMinIndex, &zoomMaxIndex](const std::vector physicalApertureRange) { + return physicalApertureRange[zoomMaxIndex] > currentZoomRatio >= physicalApertureRange[zoomMinIndex]; + }); + float autoAperture = 0.0; + if ((physicalAperture != autoAperture) && (it == physicalApertures.end())) { + MEDIA_ERR_LOG("current zoomRatio not supported in physical apertures zoom ratio"); + return CameraErrorCode::SUCCESS; + } + int physicalAperturesIndex = 2; + auto res = std::find_if(std::next((*it).begin(), physicalAperturesIndex), (*it).end(), + [&physicalAperture](const float physicalApertureTemp) { + return FloatIsEqual(physicalAperture, physicalApertureTemp); + }); + if ((physicalAperture != autoAperture) && res == 
(*it).end()) { + MEDIA_ERR_LOG("current physicalAperture is not supported"); + return CameraErrorCode::SUCCESS; + } + uint32_t count = 1; + bool status = false; + camera_metadata_item_t item; + int32_t ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), + OHOS_CONTROL_CAMERA_PHYSICAL_APERTURE_VALUE, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = changedMetadata_->addEntry(OHOS_CONTROL_CAMERA_PHYSICAL_APERTURE_VALUE, &physicalAperture, count); + } else if (ret == CAM_META_SUCCESS) { + status = changedMetadata_->updateEntry(OHOS_CONTROL_CAMERA_PHYSICAL_APERTURE_VALUE, &physicalAperture, count); + } + if (!status) { + MEDIA_ERR_LOG("SetPhysicalAperture Failed to set physical aperture"); + } + apertureValue_ = physicalAperture; + return CameraErrorCode::SUCCESS; +} +//callbacks +void ProfessionSession::SetExposureInfoCallback(std::shared_ptr callback) +{ + std::lock_guard lock(sessionCallbackMutex_); + exposureInfoCallback_ = callback; +} + +void ProfessionSession::SetIsoInfoCallback(std::shared_ptr callback) +{ + std::lock_guard lock(sessionCallbackMutex_); + isoInfoCallback_ = callback; +} + +void ProfessionSession::SetApertureInfoCallback(std::shared_ptr callback) +{ + std::lock_guard lock(sessionCallbackMutex_); + apertureInfoCallback_ = callback; +} + +void ProfessionSession::SetLuminationInfoCallback(std::shared_ptr callback) +{ + std::lock_guard lock(sessionCallbackMutex_); + luminationInfoCallback_ = callback; +} + +void ProfessionSession::ProcessSensorExposureTimeChange(const std::shared_ptr &result) +{ + camera_metadata_item_t item; + common_metadata_header_t* metadata = result->get(); + int ret = Camera::FindCameraMetadataItem(metadata, OHOS_STATUS_SENSOR_EXPOSURE_TIME, &item); + if (ret == CAM_META_SUCCESS) { + int32_t numerator = item.data.r->numerator; + int32_t denominator = item.data.r->denominator; + MEDIA_DEBUG_LOG("SensorExposureTime: %{public}d/%{public}d", numerator, denominator); + if (denominator == 0) { + 
MEDIA_ERR_LOG("ProcessSensorExposureTimeChange error! divide by zero"); + return; + } + constexpr int32_t timeUnit = 1000000; + uint32_t value = numerator / (denominator / timeUnit); + MEDIA_DEBUG_LOG("SensorExposureTime: %{public}d", value); + ExposureInfo info = { + .exposureDurationValue = value, + }; + std::lock_guard lock(sessionCallbackMutex_); + if (exposureInfoCallback_ != nullptr && (value != exposureDurationValue_)) { + if (exposureDurationValue_ != 0) { + exposureInfoCallback_->OnExposureInfoChanged(info); + } + exposureDurationValue_ = value; + } + } +} + +void ProfessionSession::ProcessIsoChange(const std::shared_ptr &result) +{ + camera_metadata_item_t item; + common_metadata_header_t* metadata = result->get(); + int ret = Camera::FindCameraMetadataItem(metadata, OHOS_STATUS_ISO_VALUE, &item); + if (ret == CAM_META_SUCCESS) { + MEDIA_DEBUG_LOG("Iso Value: %{public}d", item.data.ui32[0]); + IsoInfo info = { + .isoValue = item.data.ui32[0], + }; + std::lock_guard lock(sessionCallbackMutex_); + if (isoInfoCallback_ != nullptr && item.data.ui32[0] != isoValue_) { + if (isoValue_ != 0) { + isoInfoCallback_->OnIsoInfoChanged(info); + } + isoValue_ = item.data.ui32[0]; + } + } +} + +void ProfessionSession::ProcessApertureChange(const std::shared_ptr &result) +{ + camera_metadata_item_t item; + common_metadata_header_t* metadata = result->get(); + int ret = Camera::FindCameraMetadataItem(metadata, OHOS_STATUS_CAMERA_APERTURE_VALUE, &item); + if (ret == CAM_META_SUCCESS) { + MEDIA_DEBUG_LOG("aperture Value: %{public}f", ConfusingNumber(item.data.f[0])); + std::lock_guard lock(sessionCallbackMutex_); + ApertureInfo info = { + .apertureValue = item.data.f[0], + }; + if (apertureInfoCallback_ != nullptr && (item.data.f[0] != apertureValue_ || apertureValue_ == 0)) { + apertureInfoCallback_->OnApertureInfoChanged(info); + apertureValue_ = item.data.f[0]; + } + } +} + +void ProfessionSession::ProcessLuminationChange(const std::shared_ptr &result) +{ + constexpr 
float normalizedMeanValue = 255.0; + camera_metadata_item_t item; + common_metadata_header_t* metadata = result->get(); + int ret = Camera::FindCameraMetadataItem(metadata, OHOS_STATUS_ALGO_MEAN_Y, &item); + float value = item.data.ui32[0] / normalizedMeanValue; + if (ret == CAM_META_SUCCESS) { + MEDIA_DEBUG_LOG("Lumination Value: %{public}f", value); + LuminationInfo info = { + .luminationValue = value, + }; + std::lock_guard lock(sessionCallbackMutex_); + if (luminationInfoCallback_ != nullptr && value != luminationValue_) { + luminationInfoCallback_->OnLuminationInfoChanged(info); + luminationValue_ = value; + } + } +} + +void ProfessionSession::ProcessPhysicalCameraSwitch(const std::shared_ptr& result) +{ + camera_metadata_item_t item; + common_metadata_header_t* metadata = result->get(); + int ret = Camera::FindCameraMetadataItem(metadata, OHOS_STATUS_PREVIEW_PHYSICAL_CAMERA_ID, &item); + if (ret != CAM_META_SUCCESS) { + return; + } + if (physicalCameraId_ != item.data.u8[0]) { + MEDIA_DEBUG_LOG("physicalCameraId: %{public}d", item.data.u8[0]); + physicalCameraId_ = item.data.u8[0]; + ExecuteAbilityChangeCallback(); + } +} + +std::shared_ptr ProfessionSession::GetMetadata() +{ + std::string phyCameraId = std::to_string(physicalCameraId_.load()); + auto physicalCameraDevice = std::find_if(supportedDevices_.begin(), supportedDevices_.end(), + [phyCameraId](const auto& device) -> bool { + std::string cameraId = device->GetID(); + size_t delimPos = cameraId.find("/"); + if (delimPos == std::string::npos) { + return false; + } + string id = cameraId.substr(delimPos + 1); + return id.compare(phyCameraId) == 0; + }); + if (physicalCameraDevice != supportedDevices_.end()) { + MEDIA_DEBUG_LOG("ProfessionSession::GetMetadata physicalCameraId: device/%{public}s", phyCameraId.c_str()); + if ((*physicalCameraDevice)->GetCameraType() == CAMERA_TYPE_WIDE_ANGLE && + photoProfile_.GetCameraFormat() != CAMERA_FORMAT_DNG) { + MEDIA_DEBUG_LOG("ProfessionSession::GetMetadata using 
main sensor: %{public}s", + inputDevice_->GetCameraDeviceInfo()->GetID().c_str()); + return inputDevice_->GetCameraDeviceInfo()->GetMetadata(); + } + return (*physicalCameraDevice)->GetMetadata(); + } + MEDIA_DEBUG_LOG("ProfessionSession::GetMetadata no physicalCamera, using current camera device:%{public}s", + inputDevice_->GetCameraDeviceInfo()->GetID().c_str()); + return inputDevice_->GetCameraDeviceInfo()->GetMetadata(); +} + +void ProfessionSession::ProfessionSessionMetadataResultProcessor::ProcessCallbacks( + const uint64_t timestamp, const std::shared_ptr& result) +{ + auto session = session_.promote(); + if (session == nullptr) { + MEDIA_ERR_LOG("CaptureSession::ProfessionSessionMetadataResultProcessor ProcessCallbacks but session is null"); + return; + } + + session->ProcessFaceRecUpdates(timestamp, result); + session->ProcessAutoFocusUpdates(result); + session->ProcessSensorExposureTimeChange(result); + session->ProcessIsoChange(result); + session->ProcessApertureChange(result); + session->ProcessLuminationChange(result); + session->ProcessPhysicalCameraSwitch(result); +} +} // CameraStandard +} // OHOS diff --git a/frameworks/native/camera/test/moduletest/include/camera_framework_moduletest.h b/frameworks/native/camera/test/moduletest/include/camera_framework_moduletest.h index 74b6fcaad..4e66b0e03 100644 --- a/frameworks/native/camera/test/moduletest/include/camera_framework_moduletest.h +++ b/frameworks/native/camera/test/moduletest/include/camera_framework_moduletest.h @@ -30,6 +30,12 @@ namespace OHOS { namespace CameraStandard { +typedef struct { + Profile preview; + Profile photo; + VideoProfile video; +} SelectProfiles; + class CameraFrameworkModuleTest : public testing::Test { public: static const int32_t PHOTO_DEFAULT_WIDTH = 1280; @@ -85,10 +91,12 @@ public: sptr CreatePhotoOutput(); sptr CreateVideoOutput(int32_t width, int32_t height); sptr CreateVideoOutput(); + sptr CreateVideoOutput(VideoProfile& profile); sptr CreatePhotoOutput(Profile 
profile); void GetSupportedOutputCapability(); Profile SelectProfileByRatioAndFormat(sptr& modeAbility, float ratio, CameraFormat format); + SelectProfiles SelectWantedProfiles(sptr& modeAbility, const SelectProfiles wanted); void ConfigScanSession(sptr &previewOutput_1, sptr &previewOutput_2); void ReleaseInput(); diff --git a/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp b/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp index 5e00f0f24..695e07241 100644 --- a/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp +++ b/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp @@ -89,6 +89,7 @@ enum class CAM_MOON_CAPTURE_BOOST_EVENTS { }; const int32_t WAIT_TIME_AFTER_CAPTURE = 1; +const int32_t WAIT_TIME_AFTER_RECORDING = 3; const int32_t WAIT_TIME_AFTER_START = 2; const int32_t WAIT_TIME_BEFORE_STOP = 1; const int32_t WAIT_TIME_AFTER_CLOSE = 1; @@ -517,6 +518,23 @@ sptr CameraFrameworkModuleTest::CreateVideoOutput(int32_t width, return videoOutput; } +sptr CameraFrameworkModuleTest::CreateVideoOutput(VideoProfile& videoProfile) +{ + sptr surface = IConsumerSurface::Create(); + sptr videoSurfaceListener = + new (std::nothrow) SurfaceListener("Video", SurfaceType::VIDEO, g_videoFd, surface); + surface->RegisterConsumerListener((sptr&)videoSurfaceListener); + if (videoSurfaceListener == nullptr) { + MEDIA_ERR_LOG("Failed to create new SurfaceListener"); + return nullptr; + } + sptr videoProducer = surface->GetProducer(); + sptr videoSurface = Surface::CreateSurfaceAsProducer(videoProducer); + sptr videoOutput = nullptr; + videoOutput = manager_->CreateVideoOutput(videoProfile, videoSurface); + return videoOutput; +} + sptr CameraFrameworkModuleTest::CreateVideoOutput() { sptr videoOutput = CreateVideoOutput(videoWidth_, videoHeight_); @@ -614,6 +632,28 @@ Profile CameraFrameworkModuleTest::SelectProfileByRatioAndFormat(sptr& modeAbility, const SelectProfiles 
wanted) +{ + SelectProfiles ret; + const auto& preview = std::find_if(modeAbility->GetPreviewProfiles().begin(), modeAbility->GetPreviewProfiles().end(), + [&wanted](auto& profile) { return profile == wanted.preview; }); + if (preview != modeAbility->GetPreviewProfiles().end()) { + ret.preview = *preview; + } + const auto& photo = std::find_if(modeAbility->GetPhotoProfiles().begin(), modeAbility->GetPhotoProfiles().end(), + [&wanted](auto& profile) { return profile == wanted.photo; }); + if (photo != modeAbility->GetPhotoProfiles().end()) { + ret.photo = *photo; + } + const auto& video = std::find_if(modeAbility->GetVideoProfiles().begin(), modeAbility->GetVideoProfiles().end(), + [&wanted](auto& profile) { return profile == wanted.video; }); + if (video != modeAbility->GetVideoProfiles().end()) { + ret.video = *video; + } + return ret; +} + void CameraFrameworkModuleTest::ReleaseInput() { if (input_) { @@ -2849,6 +2889,445 @@ HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_048, TestSize.Le portraitSession->Stop(); } +/* Feature: Framework + * Function: Test preview/video with profession session + * SubFunction: NA + * FunctionPoints: NA + * EnvConditions: NA + * CaseDescription: Test preview/video with profession session + */ +HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_profession_071, TestSize.Level0) +{ + SceneMode sceneMode = SceneMode::PROFESSIONAL_VIDEO; + if (!IsSupportMode(sceneMode)) { + return; + } + sptr cameraManagerObj = CameraManager::GetInstance(); + ASSERT_NE(cameraManagerObj, nullptr); + + std::vector sceneModes = cameraManagerObj->GetSupportedModes(cameras_[0]); + ASSERT_TRUE(sceneModes.size() != 0); + + sptr modeAbility = + cameraManagerObj->GetSupportedOutputCapability(cameras_[0], sceneMode); + ASSERT_NE(modeAbility, nullptr); + + SelectProfiles wanted; + wanted.preview.size_ = {640,480}; + wanted.preview.format_ = CAMERA_FORMAT_RGBA_8888; + wanted.video.size_ = {640,480}; + wanted.video.format_ = 
CAMERA_FORMAT_RGBA_8888; + wanted.video.framerates_ = {30,30}; + + SelectProfiles profiles = SelectWantedProfiles(modeAbility, wanted); + ASSERT_NE(profiles.preview.format_, -1); + ASSERT_NE(profiles.video.format_, -1); + + sptr captureSession = cameraManagerObj->CreateCaptureSession(sceneMode); + ASSERT_NE(captureSession, nullptr); + sptr session = static_cast(captureSession.GetRefPtr()); + ASSERT_NE(session, nullptr); + + int32_t intResult = session->BeginConfig(); + EXPECT_EQ(intResult, 0); + + intResult = session->AddInput(input_); + EXPECT_EQ(intResult, 0); + + sptr previewOutput = CreatePreviewOutput(profiles.preview); + ASSERT_NE(previewOutput, nullptr); + + intResult = session->AddOutput(previewOutput); + EXPECT_EQ(intResult, 0); + + sptr videoOutput = CreateVideoOutput(profiles.video); + ASSERT_NE(videoOutput, nullptr); + + intResult = session->AddOutput(videoOutput); + EXPECT_EQ(intResult, 0); + + + intResult = session->CommitConfig(); + EXPECT_EQ(intResult, 0); + + intResult = session->Start(); + EXPECT_EQ(intResult, 0); + sleep(WAIT_TIME_AFTER_START); + + intResult = ((sptr&)videoOutput)->Start(); + EXPECT_EQ(intResult, 0); + sleep(WAIT_TIME_AFTER_RECORDING); + intResult = ((sptr&)videoOutput)->Stop(); + session->Stop(); +} + +/* Feature: Framework + * Function: Test profession session metering mode + * SubFunction: NA + * FunctionPoints: NA + * EnvConditions: NA + * CaseDescription: Test profession session metering mode + */ +HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_profession_072, TestSize.Level0) +{ + SceneMode sceneMode = SceneMode::PROFESSIONAL_VIDEO; + if (!IsSupportMode(sceneMode)) { + return; + } + sptr cameraManagerObj = CameraManager::GetInstance(); + ASSERT_NE(cameraManagerObj, nullptr); + + std::vector sceneModes = cameraManagerObj->GetSupportedModes(cameras_[0]); + ASSERT_TRUE(sceneModes.size() != 0); + + sptr modeAbility = + cameraManagerObj->GetSupportedOutputCapability(cameras_[0], sceneMode); + 
ASSERT_NE(modeAbility, nullptr); + + SelectProfiles wanted; + wanted.preview.size_ = {640,480}; + wanted.preview.format_ = CAMERA_FORMAT_RGBA_8888; + wanted.video.size_ = {640,480}; + wanted.video.format_ = CAMERA_FORMAT_RGBA_8888; + wanted.video.framerates_ = {30,30}; + + SelectProfiles profiles = SelectWantedProfiles(modeAbility, wanted); + ASSERT_NE(profiles.preview.format_, -1); + ASSERT_NE(profiles.video.format_, -1); + + sptr captureSession = cameraManagerObj->CreateCaptureSession(sceneMode); + ASSERT_NE(captureSession, nullptr); + sptr session = static_cast(captureSession.GetRefPtr()); + ASSERT_NE(session, nullptr); + + int32_t intResult = session->BeginConfig(); + EXPECT_EQ(intResult, 0); + + intResult = session->AddInput(input_); + EXPECT_EQ(intResult, 0); + + sptr previewOutput = CreatePreviewOutput(profiles.preview); + ASSERT_NE(previewOutput, nullptr); + + intResult = session->AddOutput(previewOutput); + EXPECT_EQ(intResult, 0); + + sptr videoOutput = CreateVideoOutput(profiles.video); + ASSERT_NE(videoOutput, nullptr); + + intResult = session->AddOutput(videoOutput); + EXPECT_EQ(intResult, 0); + + + intResult = session->CommitConfig(); + EXPECT_EQ(intResult, 0); + std::vector modes = {}; + intResult = session->GetSupportedMeteringModes(modes); + EXPECT_EQ(intResult, 0); + EXPECT_NE(modes.size(), 0); + + intResult = session->SetMeteringMode(modes[0]); + EXPECT_EQ(intResult, 0); + MeteringMode meteringMode = METERING_MODE_CENTER_WEIGHTED; + intResult = session->GetMeteringMode(meteringMode); + EXPECT_EQ(intResult, 0); + + EXPECT_EQ(meteringMode, modes[0]); + intResult = session->Start(); + EXPECT_EQ(intResult, 0); + sleep(WAIT_TIME_AFTER_START); + + intResult = ((sptr&)videoOutput)->Start(); + EXPECT_EQ(intResult, 0); + sleep(WAIT_TIME_AFTER_RECORDING); + intResult = ((sptr&)videoOutput)->Stop(); + session->Stop(); +} + +/* Feature: Framework + * Function: Test profession session focus Assist flash mode + * SubFunction: NA + * FunctionPoints: NA + * 
EnvConditions: NA + * CaseDescription: Test profession session focus Assist flash mode + */ +HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_profession_073, TestSize.Level0) +{ + SceneMode sceneMode = SceneMode::PROFESSIONAL_VIDEO; + if (!IsSupportMode(sceneMode)) { + return; + } + sptr cameraManagerObj = CameraManager::GetInstance(); + ASSERT_NE(cameraManagerObj, nullptr); + + std::vector sceneModes = cameraManagerObj->GetSupportedModes(cameras_[0]); + ASSERT_TRUE(sceneModes.size() != 0); + + sptr modeAbility = + cameraManagerObj->GetSupportedOutputCapability(cameras_[0], sceneMode); + ASSERT_NE(modeAbility, nullptr); + + SelectProfiles wanted; + wanted.preview.size_ = {640, 480}; + wanted.preview.format_ = CAMERA_FORMAT_RGBA_8888; + wanted.video.size_ = {640, 480}; + wanted.video.format_ = CAMERA_FORMAT_RGBA_8888; + wanted.video.framerates_ = {30, 30}; + + SelectProfiles profiles = SelectWantedProfiles(modeAbility, wanted); + ASSERT_NE(profiles.preview.format_, -1); + ASSERT_NE(profiles.video.format_, -1); + + sptr captureSession = cameraManagerObj->CreateCaptureSession(sceneMode); + ASSERT_NE(captureSession, nullptr); + sptr session = static_cast(captureSession.GetRefPtr()); + ASSERT_NE(session, nullptr); + + int32_t intResult = session->BeginConfig(); + EXPECT_EQ(intResult, 0); + + intResult = session->AddInput(input_); + EXPECT_EQ(intResult, 0); + + sptr previewOutput = CreatePreviewOutput(profiles.preview); + ASSERT_NE(previewOutput, nullptr); + + intResult = session->AddOutput(previewOutput); + EXPECT_EQ(intResult, 0); + + sptr videoOutput = CreateVideoOutput(profiles.video); + ASSERT_NE(videoOutput, nullptr); + + intResult = session->AddOutput(videoOutput); + EXPECT_EQ(intResult, 0); + + + intResult = session->CommitConfig(); + EXPECT_EQ(intResult, 0); + std::vector modes = {}; + intResult = session->GetSupportedFocusAssistFlashModes(modes); + EXPECT_EQ(intResult, 0); + EXPECT_NE(modes.size(), 0); + + intResult = 
session->SetFocusAssistFlashMode(modes[0]); + EXPECT_EQ(intResult, 0); + FocusAssistFlashMode mode; + intResult = session->GetFocusAssistFlashMode(mode); + EXPECT_EQ(intResult, 0); + + EXPECT_EQ(mode, modes[0]); + intResult = session->Start(); + EXPECT_EQ(intResult, 0); + sleep(WAIT_TIME_AFTER_START); + + intResult = ((sptr&)videoOutput)->Start(); + EXPECT_EQ(intResult, 0); + sleep(WAIT_TIME_AFTER_RECORDING); + intResult = ((sptr&)videoOutput)->Stop(); + session->Stop(); +} + +/* Feature: Framework + * Function: Test profession session exposure hint mode + * SubFunction: NA + * FunctionPoints: NA + * EnvConditions: NA + * CaseDescription: Test profession session exposure hint mode + */ +HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_profession_074, TestSize.Level0) +{ + SceneMode sceneMode = SceneMode::PROFESSIONAL_VIDEO; + if (!IsSupportMode(sceneMode)) { + return; + } + sptr cameraManagerObj = CameraManager::GetInstance(); + ASSERT_NE(cameraManagerObj, nullptr); + + std::vector sceneModes = cameraManagerObj->GetSupportedModes(cameras_[0]); + ASSERT_TRUE(sceneModes.size() != 0); + + sptr modeAbility = + cameraManagerObj->GetSupportedOutputCapability(cameras_[0], sceneMode); + ASSERT_NE(modeAbility, nullptr); + + SelectProfiles wanted; + wanted.preview.size_ = {640,480}; + wanted.preview.format_ = CAMERA_FORMAT_RGBA_8888; + wanted.video.size_ = {640,480}; + wanted.video.format_ = CAMERA_FORMAT_RGBA_8888; + wanted.video.framerates_ = {30,30}; + + SelectProfiles profiles = SelectWantedProfiles(modeAbility, wanted); + ASSERT_NE(profiles.preview.format_, -1); + ASSERT_NE(profiles.video.format_, -1); + + sptr captureSession = cameraManagerObj->CreateCaptureSession(sceneMode); + ASSERT_NE(captureSession, nullptr); + sptr session = static_cast(captureSession.GetRefPtr()); + ASSERT_NE(session, nullptr); + + int32_t intResult = session->BeginConfig(); + EXPECT_EQ(intResult, 0); + + intResult = session->AddInput(input_); + EXPECT_EQ(intResult, 0); + + sptr 
previewOutput = CreatePreviewOutput(profiles.preview); + ASSERT_NE(previewOutput, nullptr); + + intResult = session->AddOutput(previewOutput); + EXPECT_EQ(intResult, 0); + + sptr videoOutput = CreateVideoOutput(profiles.video); + ASSERT_NE(videoOutput, nullptr); + + intResult = session->AddOutput(videoOutput); + EXPECT_EQ(intResult, 0); + + + intResult = session->CommitConfig(); + EXPECT_EQ(intResult, 0); + std::vector modes = {}; + intResult = session->GetSupportedExposureHintModes(modes); + EXPECT_EQ(intResult, 0); + EXPECT_NE(modes.size(), 0); + + session->LockForControl(); + intResult = session->SetExposureHintMode(modes[0]); + EXPECT_EQ(intResult, 0); + session->UnlockForControl(); + + ExposureHintMode mode; + intResult = session->GetExposureHintMode(mode); + EXPECT_EQ(intResult, 0); + + EXPECT_EQ(mode, modes[0]); + intResult = session->Start(); + EXPECT_EQ(intResult, 0); + sleep(WAIT_TIME_AFTER_START); + + intResult = ((sptr&)videoOutput)->Start(); + EXPECT_EQ(intResult, 0); + sleep(WAIT_TIME_AFTER_RECORDING); + intResult = ((sptr&)videoOutput)->Stop(); + session->Stop(); +} +/* +* Feature: Framework +* Function: Test manual_iso_props && auto_awb_props && manual_awb_props +* SubFunction: NA +* FunctionPoints: NA +* EnvConditions: NA +* CaseDescription: test manual_iso_props && auto_awb_props && manual_awb_props +*/ +HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_profession_075, TestSize.Level0) +{ + SceneMode sceneMode = SceneMode::PROFESSIONAL_VIDEO; + if (!IsSupportMode(sceneMode)) { + return; + } + sptr modeManagerObj = CameraManager::GetInstance(); + ASSERT_NE(modeManagerObj, nullptr); + + std::vector sceneModes = modeManagerObj->GetSupportedModes(cameras_[0]); + ASSERT_TRUE(sceneModes.size() != 0); + + sptr modeAbility = + modeManagerObj->GetSupportedOutputCapability(cameras_[0], sceneMode); + ASSERT_NE(modeAbility, nullptr); + + SelectProfiles wanted; + wanted.preview.size_ = {640,480}; + wanted.preview.format_ = 
CAMERA_FORMAT_RGBA_8888; + + wanted.video.size_ = {640,480}; + wanted.video.format_ = CAMERA_FORMAT_RGBA_8888; + wanted.video.framerates_ = {30,30}; + + SelectProfiles profiles = SelectWantedProfiles(modeAbility, wanted); + ASSERT_NE(profiles.preview.format_, -1); + ASSERT_NE(profiles.video.format_, -1); + + sptr captureSession = modeManagerObj->CreateCaptureSession(sceneMode); + ASSERT_NE(captureSession, nullptr); + + sptr session = static_cast(captureSession.GetRefPtr()); + ASSERT_NE(session, nullptr); + + int32_t intResult = session->BeginConfig(); + EXPECT_EQ(intResult, 0); + + intResult = session->AddInput(input_); + EXPECT_EQ(intResult, 0); + + sptr previewOutput = CreatePreviewOutput(profiles.preview); + ASSERT_NE(previewOutput, nullptr); + + intResult = session->AddOutput(previewOutput); + EXPECT_EQ(intResult, 0); + + sptr videoOutput = CreateVideoOutput(profiles.video); + ASSERT_NE(videoOutput, nullptr); + + intResult = session->AddOutput(videoOutput); + EXPECT_EQ(intResult, 0); + + intResult = session->CommitConfig(); + EXPECT_EQ(intResult, 0); + + bool isSupported = session->IsManualIsoSupported(); + if (isSupported) { + std::vector isoRange; + session->GetIsoRange(isoRange); + ASSERT_EQ(isoRange.empty(), false); + session->LockForControl(); + intResult = session->SetISO(isoRange[1]+1); + EXPECT_NE(intResult, 0); + session->UnlockForControl(); + + session->LockForControl(); + intResult = session->SetISO(isoRange[1]); + EXPECT_EQ(intResult, 0); + session->UnlockForControl(); + + int32_t iso; + session->GetISO(iso); + EXPECT_EQ(isoRange[1], iso); + } + + std::vector supportedWhiteBalanceModes; + session->GetSupportedWhiteBalanceModes(supportedWhiteBalanceModes); + if (!supportedWhiteBalanceModes.empty()) { + session->IsWhiteBalanceModeSupported(supportedWhiteBalanceModes[0], isSupported); + ASSERT_EQ(isSupported, true); + session->LockForControl(); + intResult = session->SetWhiteBalanceMode(supportedWhiteBalanceModes[0]); + ASSERT_EQ(isSupported, 0); + 
session->UnlockForControl(); + WhiteBalanceMode currentMode; + session->GetWhiteBalanceMode(currentMode); + ASSERT_EQ(currentMode, supportedWhiteBalanceModes[0]); + } + + session->IsManualWhiteBalanceSupported(isSupported); + std::vector whiteBalanceRange; + if (isSupported) { + session->GetManualWhiteBalanceRange(whiteBalanceRange); + ASSERT_EQ(whiteBalanceRange.size() == 2, true); + + session->LockForControl(); + intResult = session->SetManualWhiteBalance(whiteBalanceRange[0] - 1); + session->UnlockForControl(); + + int32_t wbValue; + session->GetManualWhiteBalance(wbValue); + ASSERT_EQ(wbValue, whiteBalanceRange[0]); + } else { + session->GetManualWhiteBalanceRange(whiteBalanceRange); + ASSERT_EQ(whiteBalanceRange.size() < 2, true); + } +} + /* * Feature: Framework * Function: Test Scan Session add output diff --git a/interfaces/inner_api/native/camera/include/input/camera_manager.h b/interfaces/inner_api/native/camera/include/input/camera_manager.h index a29eb9fb1..cdd21c653 100644 --- a/interfaces/inner_api/native/camera/include/input/camera_manager.h +++ b/interfaces/inner_api/native/camera/include/input/camera_manager.h @@ -29,6 +29,7 @@ #include "safe_map.h" #include "session/capture_session.h" #include "session/portrait_session.h" +#include "session/profession_session.h" #include "session/night_session.h" #include "session/scan_session.h" #include "session/video_session.h" diff --git a/interfaces/inner_api/native/camera/include/output/camera_output_capability.h b/interfaces/inner_api/native/camera/include/output/camera_output_capability.h index bd147c760..d5701e62e 100644 --- a/interfaces/inner_api/native/camera/include/output/camera_output_capability.h +++ b/interfaces/inner_api/native/camera/include/output/camera_output_capability.h @@ -41,6 +41,7 @@ enum CameraFormat { CAMERA_FORMAT_INVALID = -1, CAMERA_FORMAT_YCBCR_420_888 = 2, CAMERA_FORMAT_RGBA_8888 = 3, + CAMERA_FORMAT_DNG = 4, CAMERA_FORMAT_YUV_420_SP = 1003, CAMERA_FORMAT_NV12 = 1004, 
CAMERA_FORMAT_YUV_422_YUYV = 1005, @@ -97,7 +98,12 @@ public: VideoProfile(CameraFormat format, Size size, std::vector framerates); VideoProfile() = default; virtual ~VideoProfile() = default; - + VideoProfile& operator=(const VideoProfile& rhs) + { + Profile::operator=(rhs); + this->framerates_ = rhs.framerates_; + return *this; + } /** * @brief Get supported framerates of the profile. * diff --git a/interfaces/inner_api/native/camera/include/output/photo_output.h b/interfaces/inner_api/native/camera/include/output/photo_output.h index 7f6daf2ac..884bf976b 100644 --- a/interfaces/inner_api/native/camera/include/output/photo_output.h +++ b/interfaces/inner_api/native/camera/include/output/photo_output.h @@ -246,6 +246,13 @@ public: */ int32_t SetThumbnail(bool isEnabled); + /** + * @brief Set the Thumbnail profile. + * + * @param isEnabled quickThumbnail is enabled. + */ + int32_t SetRawPhotoInfo(sptr &surface); + /** * @brief Set the photo callback. * @@ -332,7 +339,9 @@ public: std::shared_ptr GetDefaultCaptureSetting(); sptr thumbnailSurface_; - + + sptr rawPhotoSurface_; + sptr deferredSurface_; private: diff --git a/interfaces/inner_api/native/camera/include/session/capture_scene_const.h b/interfaces/inner_api/native/camera/include/session/capture_scene_const.h index f208d207a..a7af2ca81 100644 --- a/interfaces/inner_api/native/camera/include/session/capture_scene_const.h +++ b/interfaces/inner_api/native/camera/include/session/capture_scene_const.h @@ -22,6 +22,17 @@ namespace OHOS { namespace CameraStandard { +enum JsSceneMode : int32_t { + JS_NORMAL = 0, + JS_CAPTURE = 1, + JS_VIDEO = 2, + JS_PORTRAIT = 3, + JS_NIGHT = 4, + JS_SLOW_MOTION = 7, + JS_PROFESSIONAL_PHOTO = 11, + JS_PROFESSIONAL_VIDEO = 12, +}; + enum SceneMode : int32_t { NORMAL = 0, CAPTURE = 1, @@ -33,6 +44,8 @@ enum SceneMode : int32_t { SCAN = 7, CAPTURE_MACRO = 8, VIDEO_MACRO = 9, + PROFESSIONAL_PHOTO = 11, + PROFESSIONAL_VIDEO = 12, HIGH_FRAME_RATE = 13 }; diff --git 
a/interfaces/inner_api/native/camera/include/session/capture_session.h b/interfaces/inner_api/native/camera/include/session/capture_session.h index 575541e2b..b1f07014c 100644 --- a/interfaces/inner_api/native/camera/include/session/capture_session.h +++ b/interfaces/inner_api/native/camera/include/session/capture_session.h @@ -224,6 +224,13 @@ public: virtual void OnSmoothZoom(int32_t duration) = 0; }; +class AbilityCallback { +public: + AbilityCallback() = default; + virtual ~AbilityCallback() = default; + virtual void OnAbilityChange() = 0; +}; + enum VideoStabilizationMode { OFF = 0, LOW, @@ -232,6 +239,18 @@ enum VideoStabilizationMode { AUTO }; +inline bool FloatIsEqual(float x, float y) +{ + const float EPSILON = 0.000001; + return std::fabs(x - y) < EPSILON; +} + +inline float ConfusingNumber(float data) +{ + const float factor = 20; + return data * factor; +} + class CaptureSession : public RefBase { public: class CaptureSessionMetadataResultProcessor : public MetadataResultProcessor { @@ -323,7 +342,7 @@ public: * @return Returns the pointer to SessionCallback set by application. */ std::shared_ptr GetApplicationCallback(); - + /** * @brief Get the ExposureCallback. * @@ -900,6 +919,29 @@ public: */ void SetColorEffect(ColorEffect colorEffect); +// Focus Distance + /** + * @brief Get the current FocusDistance. + * @param distance current Focus Distance. + * @return Returns errCode. + */ + int32_t GetFocusDistance(float& distance); + + /** + * @brief Set Focus istance. + * + * @param distance to be set. + * @return Returns errCode. + */ + int32_t SetFocusDistance(float distance); + + /** + * @brief Get the current FocusDistance. + * @param distance current Focus Distance. + * @return Returns errCode. + */ + float GetMinimumFocusDistance(); + /** * @brief Check current status is support macro or not. 
*/ @@ -993,6 +1035,12 @@ public: const std::shared_ptr &result); void ProcessSnapshotDurationUpdates(const uint64_t timestamp, const std::shared_ptr &result); + + virtual std::shared_ptr GetMetadata(); + + void ExecuteAbilityChangeCallback(); + void SetAbilityCallback(std::shared_ptr abilityCallback); + inline std::shared_ptr GetMetadataResultProcessor() { return metadataResultProcessor_; @@ -1009,7 +1057,20 @@ protected: std::map beautyTypeAndLevels_; std::shared_ptr metadataResultProcessor_ = nullptr; bool isImageDeferred_; + static const std::unordered_map metaExposureModeMap_; + static const std::unordered_map fwkExposureModeMap_; + + static const std::unordered_map metaFocusModeMap_; + static const std::unordered_map fwkFocusModeMap_; + + static const std::unordered_map metaFlashModeMap_; + static const std::unordered_map fwkFlashModeMap_; + + static const std::unordered_map metaColorEffectMap_; + static const std::unordered_map fwkColorEffectMap_; +protected: + std::shared_ptr abilityCallback_; private: std::mutex changeMetaMutex_; std::mutex sessionCallbackMutex_; @@ -1031,12 +1092,7 @@ private: volatile bool isSetMoonCaptureBoostEnable_ = false; static const std::unordered_map metaFocusStateMap_; static const std::unordered_map metaExposureStateMap_; - static const std::unordered_map metaExposureModeMap_; - static const std::unordered_map fwkExposureModeMap_; - static const std::unordered_map metaFocusModeMap_; - static const std::unordered_map fwkFocusModeMap_; - static const std::unordered_map metaFlashModeMap_; - static const std::unordered_map fwkFlashModeMap_; + static const std::unordered_map metaFilterTypeMap_; static const std::unordered_map fwkFilterTypeMap_; static const std::unordered_map metaBeautyTypeMap_; @@ -1046,8 +1102,7 @@ private: static const std::unordered_map metaBeautyControlMap_; static const std::unordered_map metaVideoStabModesMap_; static const std::unordered_map fwkVideoStabModesMap_; - static const std::unordered_map 
metaColorEffectMap_; - static const std::unordered_map fwkColorEffectMap_; + sptr metaOutput_; sptr photoOutput_; std::atomic prevDuration_ = 0; diff --git a/interfaces/inner_api/native/camera/include/session/portrait_session.h b/interfaces/inner_api/native/camera/include/session/portrait_session.h index d42696303..a64ad9fc9 100644 --- a/interfaces/inner_api/native/camera/include/session/portrait_session.h +++ b/interfaces/inner_api/native/camera/include/session/portrait_session.h @@ -109,7 +109,6 @@ public: void SetPhysicalAperture(const float virtualAperture); private: - bool FloatIsEqual(float x, float y); static const std::unordered_map metaToFwPortraitEffect_; static const std::unordered_map fwToMetaPortraitEffect_; }; diff --git a/interfaces/inner_api/native/camera/include/session/profession_session.h b/interfaces/inner_api/native/camera/include/session/profession_session.h new file mode 100644 index 000000000..a0ae86061 --- /dev/null +++ b/interfaces/inner_api/native/camera/include/session/profession_session.h @@ -0,0 +1,559 @@ +/* + * Copyright (c) 2021-2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef OHOS_CAMERA_PROFESSION_SESSION_H +#define OHOS_CAMERA_PROFESSION_SESSION_H + +#include +#include +#include +#include +#include +#include "camera_device.h" +#include "camera_error_code.h" +#include "input/capture_input.h" +#include "output/capture_output.h" +#include "icamera_util.h" +#include "icapture_session.h" +#include "icapture_session_callback.h" +#include "capture_session.h" + +namespace OHOS { +namespace CameraStandard { +class ExposureInfoCallback; +class IsoInfoCallback; +class ApertureInfoCallback; +class LuminationInfoCallback; +typedef enum { + METERING_MODE_REGION = 0, + METERING_MODE_CENTER_WEIGHTED, + METERING_MODE_SPOT, + METERING_MODE_OVERALL, +} MeteringMode; + +typedef enum { + FOCUS_ASSIST_FLASH_MODE_OFF = 0, + FOCUS_ASSIST_FLASH_MODE_ON, + FOCUS_ASSIST_FLASH_MODE_DEFAULT, + FOCUS_ASSIST_FLASH_MODE_AUTO, +} FocusAssistFlashMode; + +typedef enum { + AWB_MODE_AUTO = 0, + AWB_MODE_CLOUDY_DAYLIGHT, + AWB_MODE_INCANDESCENT, + AWB_MODE_FLUORESCENT, + AWB_MODE_DAYLIGHT, + AWB_MODE_OFF, + AWB_MODE_WARM_FLUORESCENT, + AWB_MODE_TWILIGHT, + AWB_MODE_SHADE, +} WhiteBalanceMode; + +typedef enum { + EXPOSURE_HINT_MODE_OFF = 0, + EXPOSURE_HINT_MODE_ON, + EXPOSURE_HINT_UNSUPPORTED, +} ExposureHintMode; + +typedef enum { + OHOS_CAMERA_EXPOSURE_HINT_UNSUPPORTED = 0, + OHOS_CAMERA_EXPOSURE_HINT_MODE_ON, + OHOS_CAMERA_EXPOSURE_HINT_MODE_OFF, +} camera_exposure_hint_mode_enum_t; + +class ProfessionSession : public CaptureSession { +public: + class ProfessionSessionMetadataResultProcessor : public MetadataResultProcessor { + public: + explicit ProfessionSessionMetadataResultProcessor(wptr session) : session_(session) {} + void ProcessCallbacks( + const uint64_t timestamp, const std::shared_ptr& result) override; + + private: + wptr session_; + }; + + explicit ProfessionSession(sptr& session, + std::vector> devices) : CaptureSession(session) + { + metadataResultProcessor_ = std::make_shared(this); + supportedDevices_.resize(devices.size()); + 
std::copy(devices.begin(), devices.end(), supportedDevices_.begin()); + } + + ~ProfessionSession(); +// Metering mode + /** + * @brief Get Metering mode. + * @param vector of Metering Mode. + * @return errCode. + */ + int32_t GetSupportedMeteringModes(std::vector& meteringModes); + + /** + * @brief Query whether given meteringMode mode supported. + * + * @param camera_meter_mode_t flash mode to query. + * @param bool True if supported false otherwise. + * @return errCode. + */ + int32_t IsMeteringModeSupported(MeteringMode meteringMode, bool &isSupported); + + /** + * @brief Set Metering Mode. + * @param exposure MeteringMode to be set. + * @return errCode. + */ + int32_t SetMeteringMode(MeteringMode mode); + + /** + * @brief Get MeteringMode. + * @param exposure current MeteringMode . + * @return Returns errCode. + */ + int32_t GetMeteringMode(MeteringMode& mode); + +// ISO + /** + * @brief Get the supported iso. + * + * @return Returns the array of iso. + */ + int32_t GetIsoRange(std::vector& isoRange); + + /** + * @brief Get the iso. + * + * @return Returns the value of iso. + */ + int32_t GetISO(int32_t &iso); + + /** + * @brief Set the iso. + */ + int32_t SetISO(int32_t iso); + + /** + * @brief Check is support manual iso. + */ + bool IsManualIsoSupported(); + +// SensorExposureTime + /** + * @brief Get exposure time range. + * @param vector of exposure time range. + * @return errCode. + */ + int32_t GetSensorExposureTimeRange(std::vector &sensorExposureTimeRange); + + /** + * @brief Set exposure time value. + * @param exposure compensation value to be set. + * @return errCode. + */ + int32_t SetSensorExposureTime(uint32_t sensorExposureTime); + + /** + * @brief Get exposure time value. + * @param exposure current exposure time value . + * @return Returns errCode. + */ + int32_t GetSensorExposureTime(uint32_t &sensorExposureTime); + +//Apertures + /** + * @brief Get the supported physical apertures. + * + * @return Returns the array of physical aperture. 
+ */ + int32_t GetSupportedPhysicalApertures(std::vector>& apertures); + + /** + * @brief Get the physical aperture. + * + * @return Returns current physical aperture. + */ + int32_t GetPhysicalAperture(float& aperture); + + /** + * @brief Set the physical aperture. + */ + int32_t SetPhysicalAperture(float physicalAperture); + +// Focus mode + /** + * @brief Get Metering mode. + * @param vector of Metering Mode. + * @return errCode. + */ + int32_t GetSupportedFocusModes(std::vector& modes); + + /** + * @brief Query whether given focus mode supported. + * + * @param camera_focus_mode_enum_t focus mode to query. + * @param bool True if supported false otherwise. + * @return errCode. + */ + int32_t IsFocusModeSupported(FocusMode focusMode, bool &isSupported); + + /** + * @brief Set Metering Mode. + * @param exposure MeteringMode to be set. + * @return errCode. + */ + int32_t SetFocusMode(FocusMode mode); + + /** + * @brief Get MeteringMode. + * @param exposure current MeteringMode . + * @return Returns errCode. + */ + int32_t GetFocusMode(FocusMode& mode); + + /** + * @brief Determine if the given Ouput can be added to session. + * + * @param CaptureOutput to be added to session. + */ + +// White Balance + /** + * @brief Get Metering mode. + * @param vector of Metering Mode. + * @return errCode. + */ + int32_t GetSupportedWhiteBalanceModes(std::vector& modes); + + /** + * @brief Query whether given white-balance mode supported. + * + * @param camera_focus_mode_enum_t white-balance mode to query. + * @param bool True if supported false otherwise. + * @return errCode. + */ + int32_t IsWhiteBalanceModeSupported(WhiteBalanceMode mode, bool &isSupported); + + /** + * @brief Set WhiteBalanceMode. + * @param mode WhiteBalanceMode to be set. + * @return errCode. + */ + int32_t SetWhiteBalanceMode(WhiteBalanceMode mode); + + /** + * @brief Get WhiteBalanceMode. + * @param mode current WhiteBalanceMode . + * @return Returns errCode. 
+ */ + int32_t GetWhiteBalanceMode(WhiteBalanceMode& mode); + + /** + * @brief Get ManualWhiteBalance Range. + * @param whiteBalanceRange supported Manual WhiteBalance range . + * @return Returns errCode. + */ + int32_t GetManualWhiteBalanceRange(std::vector &whiteBalanceRange); + + /** + * @brief Is Manual WhiteBalance Supported. + * @param isSupported is Support Manual White Balance . + * @return Returns errCode. + */ + int32_t IsManualWhiteBalanceSupported(bool &isSupported); + + /** + * @brief Set Manual WhiteBalance. + * @param wbValue WhiteBalance value to be set. + * @return Returns errCode. + */ + int32_t SetManualWhiteBalance(int32_t wbValue); + + /** + * @brief Get ManualWhiteBalance. + * @param wbValue WhiteBalance value to be get. + * @return Returns errCode. + */ + int32_t GetManualWhiteBalance(int32_t &wbValue); + +// ExposureHint mode + /** + * @brief Get ExposureHint mode. + * @param vector of ExposureHint Mode. + * @return errCode. + */ + int32_t GetSupportedExposureHintModes(std::vector& modes); + + /** + * @brief Set ExposureHint Mode. + * @param mode ExposureHint Mode to be set. + * @return errCode. + */ + int32_t SetExposureHintMode(ExposureHintMode mode); + + /** + * @brief Get MeteringMode. + * @param mode current MeteringMode . + * @return Returns errCode. + */ + int32_t GetExposureHintMode(ExposureHintMode& mode); + +// FocusAssistFlash mode + /** + * @brief Get FocusAssistFlash mode. + * @param vector of FocusAssistFlash Mode. + * @return errCode. + */ + int32_t GetSupportedFocusAssistFlashModes(std::vector& modes); + + /** + * @brief Query whether given focus assist flash mode supported. + * + * @param FocusAssistFlashMode focus assist flash mode to query. + * @param bool True if supported false otherwise. + * @return errCode. + */ + int32_t IsFocusAssistFlashModeSupported(FocusAssistFlashMode mode, bool &isSupported); + + /** + * @brief Set FocusAssistFlashMode. + * @param mode FocusAssistFlash Mode to be set. + * @return errCode. 
+ */ + int32_t SetFocusAssistFlashMode(FocusAssistFlashMode mode); + + /** + * @brief Get FocusAssistFlash Mode. + * @param mode current FocusAssistFlash Mode . + * @return Returns errCode. + */ + int32_t GetFocusAssistFlashMode(FocusAssistFlashMode& mode); + +// Flash Mode + /** + * @brief Get the supported Focus modes. + * @param vector of camera_focus_mode_enum_t supported exposure modes. + * @return Returns errCode. + */ + int32_t GetSupportedFlashModes(std::vector& flashModes); + + /** + * @brief Check whether camera has flash. + * @param bool True is has flash false otherwise. + * @return Returns errCode. + */ + int32_t HasFlash(bool& hasFlash); + + /** + * @brief Query whether given flash mode supported. + * + * @param camera_flash_mode_enum_t flash mode to query. + * @param bool True if supported false otherwise. + * @return errCode. + */ + int32_t IsFlashModeSupported(FlashMode flashMode, bool& isSupported); + + /** + * @brief Get the current flash mode. + * @param current flash mode. + * @return Returns errCode. + */ + int32_t GetFlashMode(FlashMode& flashMode); + + /** + * @brief Set flash mode. + * + * @param camera_flash_mode_enum_t flash mode to be set. + * @return Returns errCode. + */ + int32_t SetFlashMode(FlashMode flashMode); + /** + * @brief Determine if the given Ouput can be added to session. + * + * @param CaptureOutput to be added to session. + */ +// XMAGE + /** + * @brief Get the supported color effect. + * + * @return Returns supported color effects. + */ + int32_t GetSupportedColorEffects(std::vector& colorEffects); + + /** + * @brief Get the current color effect. + * + * @return Returns current color effect. + */ + int32_t GetColorEffect(ColorEffect& colorEffect); + + /** + * @brief Set the color effect. + */ + int32_t SetColorEffect(ColorEffect colorEffect); + +// SensorExposureTime Callback + /** + * @brief Set the SensorExposureTime callback. + * which will be called when there is SensorExposureTime change. 
+ * + * @param The ExposureInfoCallback pointer. + */ + void SetExposureInfoCallback(std::shared_ptr callback); +// Focus Callback + /** + * @brief Set the ISO callback. + * which will be called when there is ISO state change. + * + * @param The IsoInfoCallback pointer. + */ + void SetIsoInfoCallback(std::shared_ptr callback); +// Exposure Callback + /** + * @brief Set the focus distance callback. + * which will be called when there is focus distance change. + * + * @param The ApertureInfoCallback pointer. + */ + void SetApertureInfoCallback(std::shared_ptr callback); +// Exposurehint Callback + /** + * @brief Set the exposure hint callback. + * which will be called when there is exposure hint change. + * + * @param The LuminationInfoCallback pointer. + */ + void SetLuminationInfoCallback(std::shared_ptr callback); + + /** + * @brief This function is called when there is SensorExposureTime change + * and process the SensorExposureTime callback. + * + * @param result Metadata got from callback from service layer. + */ + void ProcessSensorExposureTimeChange(const std::shared_ptr& result); + + /** + * @brief This function is called when there is Iso change + * and process the Iso callback. + * + * @param result Metadata got from callback from service layer. + */ + void ProcessIsoChange(const std::shared_ptr& result); + + /** + * @brief This function is called when there is Aperture change + * and process the Aperture callback. + * + * @param result Metadata got from callback from service layer. + */ + void ProcessApertureChange(const std::shared_ptr &result); + + /** + * @brief This function is called when there is Lumination change + * and process the Lumination callback. + * + * @param result Metadata got from callback from service layer. + */ + void ProcessLuminationChange(const std::shared_ptr &result); + + /** + * @brief This function is called when physical camera switch + * and process the ability change callback. 
+ * + * @param result Metadata got from callback from service layer. + */ + void ProcessPhysicalCameraSwitch(const std::shared_ptr& result); + + std::shared_ptr GetMetadata() override; + + bool CanAddOutput(sptr& output, SceneMode modeName = SceneMode::PROFESSIONAL_VIDEO) override; +protected: + static const std::unordered_map metaMeteringModeMap_; + static const std::unordered_map fwkMeteringModeMap_; + + static const std::unordered_map + metaFocusAssistFlashModeMap_; + static const std::unordered_map + fwkFocusAssistFlashModeMap_; + + static const std::unordered_map + metaWhiteBalanceModeMap_; + static const std::unordered_map + fwkWhiteBalanceModeMap_; + + static const std::unordered_map metaExposureHintModeMap_; + static const std::unordered_map fwkExposureHintModeMap_; +private: + std::mutex sessionCallbackMutex_; + std::shared_ptr exposureInfoCallback_ = nullptr; + std::shared_ptr isoInfoCallback_ = nullptr; + std::shared_ptr apertureInfoCallback_ = nullptr; + std::shared_ptr luminationInfoCallback_ = nullptr; + std::atomic physicalCameraId_ = 0; + uint32_t exposureDurationValue_ = 0; + uint32_t isoValue_ = 0; + float luminationValue_ = 0.0; + float apertureValue_ = 0.0; + std::vector > supportedDevices_; +}; + +typedef struct { + uint32_t exposureDurationValue; +} ExposureInfo; + +typedef struct { + uint32_t isoValue; +} IsoInfo; + +typedef struct { + float apertureValue; +} ApertureInfo; + +typedef struct { + float luminationValue; +} LuminationInfo; + +class ExposureInfoCallback { +public: + ExposureInfoCallback() = default; + virtual ~ExposureInfoCallback() = default; + virtual void OnExposureInfoChanged(ExposureInfo info) = 0; +}; + +class IsoInfoCallback { +public: + IsoInfoCallback() = default; + virtual ~IsoInfoCallback() = default; + virtual void OnIsoInfoChanged(IsoInfo info) = 0; +}; + +class ApertureInfoCallback { +public: + ApertureInfoCallback() = default; + virtual ~ApertureInfoCallback() = default; + virtual void 
OnApertureInfoChanged(ApertureInfo info) = 0; +}; + +class LuminationInfoCallback { +public: + LuminationInfoCallback() = default; + virtual ~LuminationInfoCallback() = default; + virtual void OnLuminationInfoChanged(LuminationInfo info) = 0; +}; +} // namespace CameraStandard +} // namespace OHOS +#endif // OHOS_CAMERA_PROFESSION_SESSION_H diff --git a/interfaces/inner_api/native/test/BUILD.gn b/interfaces/inner_api/native/test/BUILD.gn index 131267d92..955f162c7 100644 --- a/interfaces/inner_api/native/test/BUILD.gn +++ b/interfaces/inner_api/native/test/BUILD.gn @@ -181,3 +181,39 @@ ohos_executable("camera_capture_mode") { part_name = "camera_framework" subsystem_name = "multimedia" } + +ohos_executable("camera_capture_profession") { + install_enable = false + sources = [ + "camera_capture_profession.cpp", + "test_common.cpp", + ] + cflags = [ + "-fPIC", + "-g", + ] + + cflags += [ "-Wall" ] + cflags_cc = cflags + + configs = [ ":camera_config" ] + + deps = [ "${multimedia_camera_framework_path}/frameworks/native/camera:camera_framework" ] + + external_deps = [ + "access_token:libaccesstoken_sdk", + "access_token:libnativetoken", + "access_token:libtoken_setproc", + "c_utils:utils", + "drivers_interface_camera:libcamera_proxy_1.0", + "drivers_interface_camera:libcamera_proxy_1.1", + "drivers_interface_camera:metadata", + "graphic_surface:surface", + "hilog:libhilog", + "hisysevent:libhisysevent", + "hitrace:hitrace_meter", + "ipc:ipc_core", + ] + part_name = "camera_framework" + subsystem_name = "multimedia" +} diff --git a/interfaces/inner_api/native/test/camera_capture_mode.cpp b/interfaces/inner_api/native/test/camera_capture_mode.cpp index a9fa347c2..3681d1644 100644 --- a/interfaces/inner_api/native/test/camera_capture_mode.cpp +++ b/interfaces/inner_api/native/test/camera_capture_mode.cpp @@ -30,7 +30,6 @@ #include "accesstoken_kit.h" #include "nativetoken_kit.h" #include "token_setproc.h" -#include "camera_utils_stub.h" using namespace std; using 
namespace OHOS; using namespace OHOS::CameraStandard; @@ -259,8 +258,6 @@ std::vector GetSupportedFiltersStub() int main(int argc, char **argv) { cout<<"-----------------version:20230822-----------------"< +#include +#include +#include "input/camera_input.h" +#include "input/camera_manager.h" +#include "output/camera_output_capability.h" + +#include "camera_log.h" +#include "surface.h" +#include "test_common.h" + +#include "ipc_skeleton.h" +#include "access_token.h" +#include "hap_token_info.h" +#include "accesstoken_kit.h" +#include "nativetoken_kit.h" +#include "token_setproc.h" +using namespace std; +using namespace OHOS; +using namespace OHOS::CameraStandard; + +std::map g_abilityIdStr_ = { + {536870912, "OHOS_ABILITY_SCENE_FILTER_TYPES"}, + {536870914, "OHOS_ABILITY_SCENE_PORTRAIT_EFFECT_TYPES"}, + {536870916, "OHOS_ABILITY_SCENE_BEAUTY_TYPES"} +}; +int main(int argc, char **argv) +{ + cout<<"-----------------version:20240322-----------------"< camManagerObj = CameraManager::GetInstance(); + MEDIA_INFO_LOG("Setting callback to listen camera status and flash status"); + camManagerObj->SetCallback(std::make_shared(testName)); + std::vector> cameraObjList = camManagerObj->GetSupportedCameras(); + if (cameraObjList.size() == 0) { + return 0; + } + sptr device = cameraObjList[0]; + MEDIA_INFO_LOG("Camera ID count: %{public}zu", cameraObjList.size()); + for (auto& it : cameraObjList) { + MEDIA_INFO_LOG("Camera ID: %{public}s", it->GetID().c_str()); + MEDIA_INFO_LOG("Camera Posion: %{public}d", it->GetPosition()); + if (devicePosion == 0 && it->GetPosition() == CameraPosition::CAMERA_POSITION_BACK) { + device = it; + break; + } + if (devicePosion == 1 && it->GetPosition() == CameraPosition::CAMERA_POSITION_FRONT) { + device = it; + break; + } + } + cout<<"Camera ID ="<GetID()<<",camera Position = "<GetPosition()< supportedModes = camManagerObj->GetSupportedModes(device); + std::string modes = ""; + for (auto mode : supportedModes) { + modes += 
std::to_string(static_cast(mode)) + " , "; + } + MEDIA_INFO_LOG("supportedModes : %{public}s", modes.c_str()); + sptr captureSession = camManagerObj->CreateCaptureSession(SceneMode::PROFESSIONAL_VIDEO); + sptr professionSession = nullptr; + professionSession = static_cast (captureSession.GetRefPtr()); + if (professionSession == nullptr) { + return 0; + } + professionSession->BeginConfig(); + sptr captureInput = camManagerObj->CreateCameraInput(device); + if (captureInput == nullptr) { + return 0; + } + + sptr cameraInput = (sptr &)captureInput; + cameraInput->Open(); + std::vector photoProfiles = {}; + std::vector previewProfiles = {}; + std::vector videoProfiles = {}; + string abilityIds = ""; + if (isResolutionConfigured) { + std::vector previewFormats; + std::vector photoFormats; + std::vector videoFormats; + std::vector previewSizes; + std::vector photoSizes; + std::vector videoSizes; + sptr outputcapability = + camManagerObj->GetSupportedOutputCapability(device, SceneMode::PROFESSIONAL_VIDEO); + previewProfiles = outputcapability->GetPreviewProfiles(); + uint32_t profileIndex = 0; + for (auto i : previewProfiles) { + previewFormats.push_back(i.GetCameraFormat()); + previewSizes.push_back(i.GetSize()); + + abilityIds = ""; + for (auto id : i.GetAbilityId()) { + abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; + } + MEDIA_INFO_LOG("index(%{public}d), preview profile f(%{public}d), w(%{public}d), h(%{public}d) " + "support ability: %{public}s", profileIndex++, + i.GetCameraFormat(), i.GetSize().width, i.GetSize().height, abilityIds.c_str()); + } + + photoProfiles = outputcapability->GetPhotoProfiles(); + profileIndex = 0; + for (auto i : photoProfiles) { + photoFormats.push_back(i.GetCameraFormat()); + photoSizes.push_back(i.GetSize()); + abilityIds = ""; + for (auto id : i.GetAbilityId()) { + abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; + } + MEDIA_INFO_LOG("index %{public}d, photo support format : %{public}d, width: 
%{public}d, height: %{public}d" + "support ability: %{public}s", profileIndex++, + i.GetCameraFormat(), i.GetSize().width, i.GetSize().height, abilityIds.c_str()); + } + + videoProfiles = outputcapability->GetVideoProfiles(); + profileIndex = 0; + for (auto i : videoProfiles) { + videoFormats.push_back(i.GetCameraFormat()); + videoSizes.push_back(i.GetSize()); + + abilityIds = ""; + for (auto id : i.GetAbilityId()) { + abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; + } + MEDIA_INFO_LOG("index(%{public}d), video profile f(%{public}d), w(%{public}d), h(%{public}d) " + "support ability: %{public}s", profileIndex++, + i.GetCameraFormat(), i.GetSize().width, i.GetSize().height, abilityIds.c_str()); + } + } + + MEDIA_INFO_LOG("photoCaptureCount: %{public}d", photoCaptureCount); + + cameraInput->SetErrorCallback(std::make_shared(testName)); + ret = professionSession->AddInput(captureInput); + if (ret != 0) { + return 0; + } + + sptr photoSurface = IConsumerSurface::Create(); + if (photoSurface == nullptr) { + return 0; + } + photosize.width = photoWidth; + photosize.height = photoHeight; + Profile photoprofile; + for (auto it : photoProfiles) { + if (it.GetSize().width == photosize.width && it.GetSize().height == photosize.height + && it.GetCameraFormat() == static_cast(photoFormat)) { + photoprofile = it; + } + } + abilityIds = ""; + for (auto id : photoprofile.GetAbilityId()) { + abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; + } + MEDIA_INFO_LOG("photo support format : %{public}d, width: %{public}d, height: %{public}d" + "support ability: %{public}s", + photoFormat, photoWidth, photoHeight, abilityIds.c_str()); + cout<< "photoFormat: " << photoFormat << " photoWidth: "<< photoWidth + << " photoHeight: " << photoHeight << " support ability: " << abilityIds.c_str() << endl; + + sptr captureListener = new(std::nothrow) SurfaceListener("Photo", SurfaceType::PHOTO, + photoFd, photoSurface); + 
photoSurface->RegisterConsumerListener((sptr &)captureListener); + + sptr metaOutput = camManagerObj->CreateMetadataOutput(); + MEDIA_INFO_LOG("Setting Meta callback"); + ((sptr &)metaOutput)->SetCallback(std::make_shared(testName)); + + ret = professionSession->AddOutput(metaOutput); + if (ret != 0) { + return 0; + } + + sptr videoSurface = IConsumerSurface::Create(); + if (videoSurface == nullptr) { + return 0; + } + videosize.width = videoWidth; + videosize.height = videoHeight; + VideoProfile videoprofile; + for (auto it : videoProfiles) { + if (it.GetSize().width == videosize.width && it.GetSize().height == videosize.height + && it.GetCameraFormat() == static_cast(videoFormat)) { + videoprofile = it; + } + } + abilityIds = ""; + for (auto id : videoprofile.GetAbilityId()) { + abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; + } + MEDIA_INFO_LOG("videoFormat: %{public}d, videoWidth: %{public}d, videoHeight: %{public}d" + "support ability: %{public}s", + videoFormat, videoWidth, videoHeight, abilityIds.c_str()); + + cout<< "videoFormat: " << videoFormat << " videoWidth: "<< videoWidth + << " videoHeight: " << videoHeight << " support ability: " << abilityIds.c_str() < listener = new(std::nothrow) SurfaceListener("Video", SurfaceType::VIDEO, + videoFd, videoSurface); + videoSurface->RegisterConsumerListener((sptr &)listener); + sptr videoProducer = videoSurface->GetProducer(); + sptr videoProducerSurface = Surface::CreateSurfaceAsProducer(videoProducer); + sptr videoOutput = camManagerObj->CreateVideoOutput(videoprofile, videoProducerSurface); + if (videoOutput == nullptr) { + return 0; + } + + MEDIA_INFO_LOG("Setting video callback"); + ((sptr &)videoOutput)->SetCallback(std::make_shared(testName)); + ret = professionSession->AddOutput(videoOutput); + if (ret != 0) { + return 0; + } + + sptr previewSurface = IConsumerSurface::Create(); + if (previewSurface == nullptr) { + return 0; + } + previewsize.width = previewWidth; + previewsize.height 
= previewHeight; + Profile previewprofile; + for (auto it : previewProfiles) { + if (it.GetSize().width == previewsize.width && it.GetSize().height == previewsize.height + && it.GetCameraFormat() == static_cast(previewFormat)) { + previewprofile = it; + } + } + abilityIds = ""; + for (auto id : previewprofile.GetAbilityId()) { + abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; + } + MEDIA_INFO_LOG("previewFormat: %{public}d, previewWidth: %{public}d, previewHeight: %{public}d" + "support ability: %{public}s", + previewFormat, previewWidth, previewHeight, abilityIds.c_str()); + + cout<< "previewFormat: " << previewFormat << " previewWidth: "<< previewWidth + << " previewHeight: " << previewHeight << " support ability: " << abilityIds.c_str() < videoListener = new(std::nothrow) SurfaceListener("Preview", SurfaceType::PREVIEW, + previewFd, previewSurface); + previewSurface->RegisterConsumerListener((sptr &)videoListener); + sptr previewProducer = previewSurface->GetProducer(); + sptr previewProducerSurface = Surface::CreateSurfaceAsProducer(previewProducer); + sptr previewOutput = camManagerObj->CreatePreviewOutput(previewprofile, previewProducerSurface); + if (previewOutput == nullptr) { + return 0; + } + + MEDIA_INFO_LOG("Setting preview callback"); + ((sptr &)previewOutput)->SetCallback(std::make_shared(testName)); + ret = professionSession->AddOutput(previewOutput); + if (ret != 0) { + return 0; + } + + ret = professionSession->CommitConfig(); + if (ret != 0) { + return 0; + } + + MEDIA_INFO_LOG("Preview started"); + ret = professionSession->Start(); + if (ret != 0) { + return 0; + } + + sleep(previewCaptureGap); + + ret = ((sptr &)videoOutput)->Start(); + if (ret != 0) { + MEDIA_ERR_LOG("Failed to start recording, result: %{public}d", ret); + return ret; + } + sleep(videoDurationGap); + ret = ((sptr &)videoOutput)->Stop(); + if (ret != 0) { + MEDIA_ERR_LOG("Failed to stop recording, result: %{public}d", ret); + return ret; + } + ret = 
TestUtils::SaveVideoFile(nullptr, 0, VideoSaveMode::CLOSE, videoFd); + videoFd = -1; + sleep(gapAfterCapture); + + MEDIA_INFO_LOG("Closing the session"); + ((sptr &)previewOutput)->Stop(); + professionSession->Stop(); + professionSession->Release(); + cameraInput->Release(); + camManagerObj->SetCallback(nullptr); + + MEDIA_INFO_LOG("Camera new sample end."); + return 0; +} diff --git a/interfaces/kits/js/camera_napi/@ohos.multimedia.camera.d.ts b/interfaces/kits/js/camera_napi/@ohos.multimedia.camera.d.ts index dd866ac49..7a1b47b39 100644 --- a/interfaces/kits/js/camera_napi/@ohos.multimedia.camera.d.ts +++ b/interfaces/kits/js/camera_napi/@ohos.multimedia.camera.d.ts @@ -1712,7 +1712,7 @@ declare namespace camera { /** * Notify device to prepare for zoom. - * + * * @throws { BusinessError } 7400103 - Session not config. * @syscap SystemCapability.Multimedia.Camera.Core * @systemapi @@ -2321,6 +2321,26 @@ declare namespace camera { * @since 11 */ off(type: 'smoothZoomInfoAvailable', callback?: AsyncCallback): void; + + /** + * Subscribes to ability change event callback. + * + * @param { 'abilityChange' } type - Event type. + * @param { AsyncCallback } callback - Callback used to return the result. + * @syscap SystemCapability.Multimedia.Camera.Core + * @since 11 + */ + on(type: 'abilityChange', callback: AsyncCallback): void; + + /** + * Unsubscribes from ability change event callback. + * + * @param { 'abilityChange' } type - Event type. + * @param { AsyncCallback } callback - Callback used to return the result. + * @syscap SystemCapability.Multimedia.Camera.Core + * @since 11 + */ + off(type: 'abilityChange', callback?: AsyncCallback): void; } /** @@ -3734,7 +3754,7 @@ declare namespace camera { * @since 12 */ off(type: 'estimatedCaptureDuration', callback?: AsyncCallback): void; - + /** * Subscribes to error events. 
* @@ -3772,7 +3792,7 @@ declare namespace camera { * The method must be called after Session.addInput() and Session.addOutput(photoOutput) are called. * To avoid stream reconfiguration and performance loss, * you are advised to call the method before Session.commitConfig(). - * + * * @param { boolean } enabled - The value TRUE means to enable quick thumbnail, and FALSE means the opposite. * @throws { BusinessError } 7400104 - session is not running. * @syscap SystemCapability.Multimedia.Camera.Core diff --git a/interfaces/kits/js/camera_napi/BUILD.gn b/interfaces/kits/js/camera_napi/BUILD.gn index a5f2e3933..1f90299c1 100644 --- a/interfaces/kits/js/camera_napi/BUILD.gn +++ b/interfaces/kits/js/camera_napi/BUILD.gn @@ -64,6 +64,7 @@ ohos_shared_library("camera_napi") { "${multimedia_camera_framework_path}/frameworks/js/camera_napi/src/mode/photo_session_for_sys_napi.cpp", "${multimedia_camera_framework_path}/frameworks/js/camera_napi/src/mode/photo_session_napi.cpp", "${multimedia_camera_framework_path}/frameworks/js/camera_napi/src/mode/portrait_session_napi.cpp", + "${multimedia_camera_framework_path}/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp", "${multimedia_camera_framework_path}/frameworks/js/camera_napi/src/mode/video_session_for_sys_napi.cpp", "${multimedia_camera_framework_path}/frameworks/js/camera_napi/src/mode/video_session_napi.cpp", "${multimedia_camera_framework_path}/frameworks/js/camera_napi/src/native_module_ohos_camera.cpp", diff --git a/interfaces/kits/js/camera_napi/include/input/camera_info_napi.h b/interfaces/kits/js/camera_napi/include/input/camera_info_napi.h index 5def82cb6..eb89a6442 100644 --- a/interfaces/kits/js/camera_napi/include/input/camera_info_napi.h +++ b/interfaces/kits/js/camera_napi/include/input/camera_info_napi.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2024 Huawei Device Co., Ltd. + * Copyright (c) 2021-2022 Huawei Device Co., Ltd. 
* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -47,7 +47,6 @@ private: static napi_value GetHostDeviceName(napi_env env, napi_callback_info info); static napi_value GetHostDeviceType(napi_env env, napi_callback_info info); static napi_value GetCameraOrientation(napi_env env, napi_callback_info info); - napi_env env_; napi_ref wrapper_; diff --git a/interfaces/kits/js/camera_napi/include/input/camera_napi.h b/interfaces/kits/js/camera_napi/include/input/camera_napi.h index 9667fcd8d..ceb1bd642 100644 --- a/interfaces/kits/js/camera_napi/include/input/camera_napi.h +++ b/interfaces/kits/js/camera_napi/include/input/camera_napi.h @@ -16,6 +16,7 @@ #ifndef CAMERA_NAPI_H_ #define CAMERA_NAPI_H_ +#include "capture_scene_const.h" #include "hilog/log.h" #include "camera_napi_utils.h" #include "output/camera_output_capability.h" @@ -57,7 +58,7 @@ static const std::int32_t VIDEO_DEFAULT_WIDTH = 640; static const std::int32_t VIDEO_DEFAULT_HEIGHT = 360; static const std::int32_t SURFACE_QUEUE_SIZE = 10; - + static const std::unordered_map mapFlashMode = { {"FLASH_MODE_CLOSE", 0}, {"FLASH_MODE_OPEN", 1}, @@ -103,6 +104,7 @@ static const std::unordered_map mapCameraFormat = { {"CAMERA_FORMAT_YUV_420_SP", CameraFormat::CAMERA_FORMAT_YUV_420_SP}, {"CAMERA_FORMAT_JPEG", CameraFormat::CAMERA_FORMAT_JPEG}, {"CAMERA_FORMAT_RGBA_8888", CameraFormat::CAMERA_FORMAT_RGBA_8888}, + {"CAMERA_FORMAT_DNG", CameraFormat::CAMERA_FORMAT_DNG}, {"CAMERA_FORMAT_YCBCR_P010", CameraFormat::CAMERA_FORMAT_YCBCR_P010}, {"CAMERA_FORMAT_YCRCB_P010", CameraFormat::CAMERA_FORMAT_YCRCB_P010}, }; @@ -153,15 +155,16 @@ static const std::unordered_map mapExposureState = { }; static const std::unordered_map mapSceneMode = { - {"NORMAL", 0}, - {"NORMAL_PHOTO", 1}, - {"NORMAL_VIDEO", 2}, - {"PORTRAIT", 3}, - {"PORTRAIT_PHOTO", 3}, - {"NIGHT", 4}, - {"NIGHT_PHOTO", 4}, - 
{"PROFESSIONAL", 5}, - {"SLOW_MOTION", 6}, + {"NORMAL", JS_NORMAL}, + {"NORMAL_PHOTO", JS_CAPTURE}, + {"NORMAL_VIDEO", JS_VIDEO}, + {"PORTRAIT", JS_PORTRAIT}, + {"PORTRAIT_PHOTO", JS_PORTRAIT}, + {"NIGHT", JS_NIGHT}, + {"NIGHT_PHOTO", JS_NIGHT}, + {"PROFESSIONAL_PHOTO", JS_PROFESSIONAL_PHOTO}, + {"PROFESSIONAL_VIDEO", JS_PROFESSIONAL_VIDEO}, + {"SLOW_MOTION", JS_SLOW_MOTION}, }; static const std::unordered_map mapFilterType = { @@ -275,6 +278,12 @@ static const std::unordered_map mapRestoreParamType = { {"TRANSIENT_ACTIVE_PARAM", 2}, }; +static const std::unordered_map mapExposureMeteringMode = { + {"MATRIX", 0}, + {"CENTER", 1}, + {"SPOT", 2}, +}; + enum CreateAsyncCallbackModes { CREATE_CAMERA_MANAGER_ASYNC_CALLBACK = 10, }; @@ -335,6 +344,7 @@ private: static thread_local napi_ref SmoothZoomModeRef_; static thread_local napi_ref colorEffectTypeRef_; static thread_local napi_ref restoreParamTypeRef_; + static thread_local napi_ref exposureMeteringModeRef_; napi_env env_; napi_ref wrapper_; }; diff --git a/interfaces/kits/js/camera_napi/include/listener_base.h b/interfaces/kits/js/camera_napi/include/listener_base.h index d4d8b6662..7a691a1db 100644 --- a/interfaces/kits/js/camera_napi/include/listener_base.h +++ b/interfaces/kits/js/camera_napi/include/listener_base.h @@ -40,7 +40,7 @@ public: protected: napi_env env_ = nullptr; -private: +protected: mutable std::mutex baseCbListMutex_; mutable std::vector> baseCbList_; }; diff --git a/interfaces/kits/js/camera_napi/include/mode/profession_session_napi.h b/interfaces/kits/js/camera_napi/include/mode/profession_session_napi.h new file mode 100644 index 000000000..d9afc790d --- /dev/null +++ b/interfaces/kits/js/camera_napi/include/mode/profession_session_napi.h @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2021-2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef PROFESSION_SESSION_NAPI_H_ +#define PROFESSION_SESSION_NAPI_H_ + +#include "napi/native_api.h" +#include "napi/native_node_api.h" +#include "session/camera_session_napi.h" + +namespace OHOS { +namespace CameraStandard { +class ExposureInfoCallbackListener : public ExposureInfoCallback, public ListenerBase { +public: + ExposureInfoCallbackListener(napi_env env) : ListenerBase(env) {} + ~ExposureInfoCallbackListener() = default; + void OnExposureInfoChanged(ExposureInfo info) override; + +private: + void OnExposureInfoChangedCallback(ExposureInfo info) const; + void OnExposureInfoChangedCallbackAsync(ExposureInfo info) const; +}; + +struct ExposureInfoChangedCallback { + ExposureInfo info_; + const ExposureInfoCallbackListener* listener_; + ExposureInfoChangedCallback(ExposureInfo info, const ExposureInfoCallbackListener* listener) + : info_(info), listener_(listener) {} +}; + +class IsoInfoCallbackListener : public IsoInfoCallback, public ListenerBase { +public: + IsoInfoCallbackListener(napi_env env) : ListenerBase(env) {} + ~IsoInfoCallbackListener() = default; + void OnIsoInfoChanged(IsoInfo info) override; + +private: + void OnIsoInfoChangedCallback(IsoInfo info) const; + void OnIsoInfoChangedCallbackAsync(IsoInfo info) const; +}; + +struct IsoInfoChangedCallback { + IsoInfo info_; + const IsoInfoCallbackListener* listener_; + IsoInfoChangedCallback(IsoInfo info, const IsoInfoCallbackListener* listener) + : info_(info), listener_(listener) {} +}; + +class ApertureInfoCallbackListener : public ApertureInfoCallback, public 
ListenerBase { +public: + ApertureInfoCallbackListener(napi_env env) : ListenerBase(env) {} + ~ApertureInfoCallbackListener() = default; + void OnApertureInfoChanged(ApertureInfo info) override; + +private: + void OnApertureInfoChangedCallback(ApertureInfo info) const; + void OnApertureInfoChangedCallbackAsync(ApertureInfo info) const; +}; + +struct ApertureInfoChangedCallback { + ApertureInfo info_; + const ApertureInfoCallbackListener* listener_; + ApertureInfoChangedCallback(ApertureInfo info, const ApertureInfoCallbackListener* listener) + : info_(info), listener_(listener) {} +}; + +class LuminationInfoCallbackListener : public LuminationInfoCallback, public ListenerBase { +public: + LuminationInfoCallbackListener(napi_env env) : ListenerBase(env) {} + ~LuminationInfoCallbackListener() = default; + void OnLuminationInfoChanged(LuminationInfo info) override; + +private: + void OnLuminationInfoChangedCallback(LuminationInfo info) const; + void OnLuminationInfoChangedCallbackAsync(LuminationInfo info) const; +}; + +struct LuminationInfoChangedCallback { + LuminationInfo info_; + const LuminationInfoCallbackListener* listener_; + LuminationInfoChangedCallback(LuminationInfo info, const LuminationInfoCallbackListener* listener) + : info_(info), listener_(listener) {} +}; + +static const char PROFESSIONAL_SESSION_NAPI_CLASS_NAME[] = "ProfessionSession"; +class ProfessionSessionNapi : public CameraSessionNapi { +public: + static napi_value Init(napi_env env, napi_value exports); + static napi_value CreateCameraSession(napi_env env, SceneMode mode); + ProfessionSessionNapi(); + ~ProfessionSessionNapi(); + + static void ProfessionSessionNapiDestructor(napi_env env, void* nativeObject, void* finalize_hint); + static napi_value ProfessionSessionNapiConstructor(napi_env env, napi_callback_info info); + + static napi_value GetSupportedMeteringModes(napi_env env, napi_callback_info info); + static napi_value IsMeteringModeSupported(napi_env env, napi_callback_info info); + 
static napi_value GetMeteringMode(napi_env env, napi_callback_info info); + static napi_value SetMeteringMode(napi_env env, napi_callback_info info); + + static napi_value IsManualIsoSupported(napi_env env, napi_callback_info info); + static napi_value GetIsoRange(napi_env env, napi_callback_info info); + static napi_value GetISO(napi_env env, napi_callback_info info); + static napi_value SetISO(napi_env env, napi_callback_info info); + + static napi_value GetSupportedVirtualApertures(napi_env env, napi_callback_info info); + static napi_value GetVirtualAperture(napi_env env, napi_callback_info info); + static napi_value SetVirtualAperture(napi_env env, napi_callback_info info); + + static napi_value GetExposureDurationRange(napi_env env, napi_callback_info info); + static napi_value GetExposureDuration(napi_env env, napi_callback_info info); + static napi_value SetExposureDuration(napi_env env, napi_callback_info info); + + static napi_value GetSupportedWhiteBalanceModes(napi_env env, napi_callback_info info); + static napi_value IsWhiteBalanceModeSupported(napi_env env, napi_callback_info info); + static napi_value GetWhiteBalanceMode(napi_env env, napi_callback_info info); + static napi_value SetWhiteBalanceMode(napi_env env, napi_callback_info info); + + static napi_value GetManualWhiteBalanceRange(napi_env env, napi_callback_info info); + static napi_value IsManualWhiteBalanceSupported(napi_env env, napi_callback_info info); + static napi_value GetManualWhiteBalance(napi_env env, napi_callback_info info); + static napi_value SetManualWhiteBalance(napi_env env, napi_callback_info info); + + static napi_value GetSupportedExposureHintModes(napi_env env, napi_callback_info info); + static napi_value GetExposureHintMode(napi_env env, napi_callback_info info); + static napi_value SetExposureHintMode(napi_env env, napi_callback_info info); + + static napi_value GetSupportedPhysicalApertures(napi_env env, napi_callback_info info); + static napi_value 
GetPhysicalAperture(napi_env env, napi_callback_info info); + static napi_value SetPhysicalAperture(napi_env env, napi_callback_info info); + + static napi_value GetSupportedFocusAssistFlashModes(napi_env env, napi_callback_info info); + static napi_value IsFocusAssistFlashModeSupported(napi_env env, napi_callback_info info); + static napi_value GetFocusAssistFlashMode(napi_env env, napi_callback_info info); + static napi_value SetFocusAssistFlashMode(napi_env env, napi_callback_info info); + + static napi_value On(napi_env env, napi_callback_info info); + static napi_value Once(napi_env env, napi_callback_info info); + static napi_value Off(napi_env env, napi_callback_info info); + + std::shared_ptr exposureInfoCallback_ = nullptr; + std::shared_ptr isoInfoCallback_ = nullptr; + std::shared_ptr apertureInfoCallback_ = nullptr; + std::shared_ptr luminationInfoCallback_ = nullptr; + std::shared_ptr abilityCallback_ = nullptr; + + napi_env env_; + napi_ref wrapper_; + sptr professionSession_; + + static thread_local napi_ref sConstructor_; + +protected: + static napi_value ProcessingPhysicalApertures(napi_env env, std::vector> physicalApertures); + + void RegisterExposureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) override; + void UnregisterExposureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) override; + void RegisterAbilityChangeCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) override; + void UnregisterAbilityChangeCallbackListener( + napi_env env, napi_value callback, const std::vector& args) override; + void RegisterIsoInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) override; + void UnregisterIsoInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) override; + void RegisterApertureInfoCallbackListener( + napi_env env, napi_value callback, const 
std::vector& args, bool isOnce) override; + void UnregisterApertureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) override; + void RegisterLuminationInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce) override; + void UnregisterLuminationInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args) override; +}; +} +} +#endif /* PROFESSION_SESSION_NAPI_H_ */ diff --git a/interfaces/kits/js/camera_napi/include/native_module_ohos_camera.h b/interfaces/kits/js/camera_napi/include/native_module_ohos_camera.h index fc9837dc1..50743a8b1 100644 --- a/interfaces/kits/js/camera_napi/include/native_module_ohos_camera.h +++ b/interfaces/kits/js/camera_napi/include/native_module_ohos_camera.h @@ -24,6 +24,7 @@ #include "input/camera_pre_launch_config_napi.h" #include "mode/mode_manager_napi.h" #include "mode/night_session_napi.h" +#include "mode/profession_session_napi.h" #include "mode/portrait_session_napi.h" #include "mode/photo_session_napi.h" #include "mode/photo_session_for_sys_napi.h" diff --git a/interfaces/kits/js/camera_napi/include/output/photo_napi.h b/interfaces/kits/js/camera_napi/include/output/photo_napi.h index 57997f5b7..520d24c6a 100644 --- a/interfaces/kits/js/camera_napi/include/output/photo_napi.h +++ b/interfaces/kits/js/camera_napi/include/output/photo_napi.h @@ -26,10 +26,12 @@ class PhotoNapi { public: static napi_value Init(napi_env env, napi_value exports); static napi_value CreatePhoto(napi_env env, napi_value mainImage); + static napi_value CreateRawPhoto(napi_env env, napi_value mainImage); PhotoNapi(); ~PhotoNapi(); - + static napi_value GetMain(napi_env env, napi_callback_info info); + static napi_value GetRaw(napi_env env, napi_callback_info info); static napi_value Release(napi_env env, napi_callback_info info); private: @@ -38,11 +40,13 @@ private: static thread_local napi_ref sConstructor_; static thread_local napi_value sMainImage_; + 
static thread_local napi_value sRawImage_; static thread_local uint32_t photoTaskId; napi_env env_; napi_ref wrapper_; napi_value mainImage_; + napi_value rawImage_; }; struct PhotoAsyncContext : public AsyncContext { diff --git a/interfaces/kits/js/camera_napi/include/output/photo_output_napi.h b/interfaces/kits/js/camera_napi/include/output/photo_output_napi.h index 10c45f683..bad433eb6 100644 --- a/interfaces/kits/js/camera_napi/include/output/photo_output_napi.h +++ b/interfaces/kits/js/camera_napi/include/output/photo_output_napi.h @@ -120,6 +120,26 @@ private: napi_ref captureDeferredPhotoCb_; }; +class RawPhotoListener : public IBufferConsumerListener { +public: + explicit RawPhotoListener(napi_env env, const sptr rawPhotoSurface); + ~RawPhotoListener() = default; + void OnBufferAvailable() override; + void SaveCallbackReference(const std::string &eventType, napi_value callback); + void RemoveCallbackRef(napi_env env, napi_value callback, const std::string &eventType); + void RemoveAllCallbacks(const std::string &eventType); + +private: + std::mutex mutex_; + napi_env env_; + sptr rawPhotoSurface_; + shared_ptr bufferProcessor_; + void UpdateJSCallback(sptr rawPhotoSurface) const; + void UpdateJSCallbackAsync(sptr rawPhotoSurface) const; + void ExecuteRawPhoto(sptr rawPhotoSurface) const; + napi_ref captureRawPhotoCb_; +}; + class PhotoOutputCallback : public PhotoStateCallback, public std::enable_shared_from_this { public: explicit PhotoOutputCallback(napi_env env); @@ -200,6 +220,14 @@ struct PhotoListenerInfo { {} }; +struct RawPhotoListenerInfo { + sptr rawPhotoSurface_; + const RawPhotoListener* listener_; + RawPhotoListenerInfo(sptr rawPhotoSurface, const RawPhotoListener* listener) + : rawPhotoSurface_(rawPhotoSurface), listener_(listener) + {} +}; + struct PhotoOutputAsyncContext; class PhotoOutputNapi : public CameraNapiEventEmitter { @@ -292,6 +320,7 @@ private: bool isDeferredPhotoEnabled_ = false; sptr thumbnailListener_; sptr photoListener_; + 
sptr rawPhotoListener_; std::shared_ptr photoOutputCallback_; static thread_local uint32_t photoOutputTaskId; }; diff --git a/interfaces/kits/js/camera_napi/include/session/camera_session_napi.h b/interfaces/kits/js/camera_napi/include/session/camera_session_napi.h index def061552..2209eb621 100644 --- a/interfaces/kits/js/camera_napi/include/session/camera_session_napi.h +++ b/interfaces/kits/js/camera_napi/include/session/camera_session_napi.h @@ -192,6 +192,22 @@ struct SmoothZoomCallbackInfo { : duration_(duration), listener_(listener) {} }; +class AbilityCallbackListener : public AbilityCallback, public ListenerBase { +public: + AbilityCallbackListener(napi_env env) : ListenerBase(env) {} + ~AbilityCallbackListener() = default; + void OnAbilityChange() override; + +private: + void OnAbilityChangeCallback() const; + void OnAbilityChangeCallbackAsync() const; +}; + +struct AbilityCallbackInfo { + const AbilityCallbackListener* listener_; + AbilityCallbackInfo(const AbilityCallbackListener* listener) : listener_(listener) {} +}; + class CameraSessionNapi : public CameraNapiEventEmitter { public: static napi_value Init(napi_env env, napi_value exports); @@ -240,6 +256,8 @@ public: static napi_value GetSupportedColorEffects(napi_env env, napi_callback_info info); static napi_value GetColorEffect(napi_env env, napi_callback_info info); static napi_value SetColorEffect(napi_env env, napi_callback_info info); + static napi_value GetFocusDistance(napi_env env, napi_callback_info info); + static napi_value SetFocusDistance(napi_env env, napi_callback_info info); static napi_value IsMacroSupported(napi_env env, napi_callback_info info); static napi_value EnableMacro(napi_env env, napi_callback_info info); @@ -286,6 +304,7 @@ public: std::shared_ptr moonCaptureBoostCallback_; std::shared_ptr featureDetectionStatusCallback_; std::shared_ptr smoothZoomCallback_; + std::shared_ptr abilityCallback_; static thread_local napi_ref sConstructor_; static thread_local sptr 
sCameraSession_; @@ -295,6 +314,7 @@ public: static const std::vector flash_props; static const std::vector auto_exposure_props; static const std::vector focus_props; + static const std::vector manual_focus_props; static const std::vector zoom_props; static const std::vector filter_props; static const std::vector beauty_props; @@ -307,13 +327,16 @@ public: private: void RegisterExposureCallbackListener( napi_env env, napi_value callback, const std::vector& args, bool isOnce); - void UnregisterExposureCallbackListener(napi_env env, napi_value callback, const std::vector& args); + void UnregisterExposureCallbackListener( + napi_env env, napi_value callback, const std::vector& args); void RegisterFocusCallbackListener( napi_env env, napi_value callback, const std::vector& args, bool isOnce); - void UnregisterFocusCallbackListener(napi_env env, napi_value callback, const std::vector& args); + void UnregisterFocusCallbackListener( + napi_env env, napi_value callback, const std::vector& args); void RegisterMacroStatusCallbackListener( napi_env env, napi_value callback, const std::vector& args, bool isOnce); - void UnregisterMacroStatusCallbackListener(napi_env env, napi_value callback, const std::vector& args); + void UnregisterMacroStatusCallbackListener( + napi_env env, napi_value callback, const std::vector& args); void RegisterMoonCaptureBoostCallbackListener( napi_env env, napi_value callback, const std::vector& args, bool isOnce); void UnregisterMoonCaptureBoostCallbackListener( @@ -324,10 +347,33 @@ private: napi_env env, napi_value callback, const std::vector& args); void RegisterSessionErrorCallbackListener( napi_env env, napi_value callback, const std::vector& args, bool isOnce); - void UnregisterSessionErrorCallbackListener(napi_env env, napi_value callback, const std::vector& args); + void UnregisterSessionErrorCallbackListener( + napi_env env, napi_value callback, const std::vector& args); void RegisterSmoothZoomCallbackListener( napi_env env, napi_value 
callback, const std::vector& args, bool isOnce); - void UnregisterSmoothZoomCallbackListener(napi_env env, napi_value callback, const std::vector& args); + void UnregisterSmoothZoomCallbackListener( + napi_env env, napi_value callback, const std::vector& args); +protected: + virtual void RegisterExposureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce); + virtual void UnregisterExposureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args); + virtual void RegisterAbilityChangeCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce); + virtual void UnregisterAbilityChangeCallbackListener( + napi_env env, napi_value callback, const std::vector& args); + virtual void RegisterIsoInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce); + virtual void UnregisterIsoInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args); + virtual void RegisterApertureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce); + virtual void UnregisterApertureInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args); + virtual void RegisterLuminationInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args, bool isOnce); + virtual void UnregisterLuminationInfoCallbackListener( + napi_env env, napi_value callback, const std::vector& args); }; struct CameraSessionAsyncContext : public AsyncContext { diff --git a/interfaces/kits/native/include/camera/camera.h b/interfaces/kits/native/include/camera/camera.h index 24a55aadf..91a5fd220 100644 --- a/interfaces/kits/native/include/camera/camera.h +++ b/interfaces/kits/native/include/camera/camera.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2024 Huawei Device Co., Ltd. + * Copyright (c) 2023 Huawei Device Co., Ltd. 
* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -599,11 +599,6 @@ typedef struct Camera_Device { * Camera connection type attribute. */ Camera_Connection connectionType; - - /** - * Camera orientation. - */ - uint32_t cameraOrientation; } Camera_Device; /** diff --git a/services/camera_service/binder/base/include/camera_service_ipc_interface_code.h b/services/camera_service/binder/base/include/camera_service_ipc_interface_code.h index d2c935dd8..06af99dc9 100644 --- a/services/camera_service/binder/base/include/camera_service_ipc_interface_code.h +++ b/services/camera_service/binder/base/include/camera_service_ipc_interface_code.h @@ -149,7 +149,8 @@ enum StreamCaptureInterfaceCode { CAMERA_STREAM_CAPTURE_CONFIRM, CAMERA_SERVICE_ENABLE_DEFERREDTYPE, CAMERA_STREAM_GET_DEFERRED_PHOTO, - CAMERA_STREAM_GET_DEFERRED_VIDEO + CAMERA_STREAM_GET_DEFERRED_VIDEO, + CAMERA_STREAM_SET_RAW_PHOTO_INFO, }; /** diff --git a/services/camera_service/binder/base/include/istream_capture.h b/services/camera_service/binder/base/include/istream_capture.h index 5bf51fbbd..b5c38eac7 100644 --- a/services/camera_service/binder/base/include/istream_capture.h +++ b/services/camera_service/binder/base/include/istream_capture.h @@ -37,10 +37,12 @@ public: virtual int32_t SetThumbnail(bool isEnabled, const sptr &producer) = 0; + virtual int32_t SetRawPhotoStreamInfo(const sptr &producer) = 0; + virtual int32_t DeferImageDeliveryFor(int32_t type) = 0; virtual int32_t IsDeferredPhotoEnabled() = 0; - + virtual int32_t IsDeferredVideoEnabled() = 0; DECLARE_INTERFACE_DESCRIPTOR(u"IStreamCapture"); diff --git a/services/camera_service/binder/client/include/hstream_capture_proxy.h b/services/camera_service/binder/client/include/hstream_capture_proxy.h index 61250181f..8cfa53f6e 100644 --- a/services/camera_service/binder/client/include/hstream_capture_proxy.h +++ 
b/services/camera_service/binder/client/include/hstream_capture_proxy.h @@ -39,10 +39,12 @@ public: int32_t SetThumbnail(bool isEnabled, const sptr &producer) override; + int32_t SetRawPhotoStreamInfo(const sptr &producer) override; + int32_t DeferImageDeliveryFor(int32_t type) override; int32_t IsDeferredPhotoEnabled() override; - + int32_t IsDeferredVideoEnabled() override; private: diff --git a/services/camera_service/binder/client/src/hstream_capture_proxy.cpp b/services/camera_service/binder/client/src/hstream_capture_proxy.cpp index c3efcab0a..471ee2e2c 100644 --- a/services/camera_service/binder/client/src/hstream_capture_proxy.cpp +++ b/services/camera_service/binder/client/src/hstream_capture_proxy.cpp @@ -138,6 +138,28 @@ int32_t HStreamCaptureProxy::SetThumbnail(bool isEnabled, const sptr &producer) +{ + MessageParcel data; + MessageParcel reply; + MessageOption option; + + if (producer == nullptr) { + MEDIA_ERR_LOG("HStreamCaptureProxy SetRawPhotoStreamInfo producer is null"); + return IPC_PROXY_ERR; + } + + data.WriteInterfaceToken(GetDescriptor()); + data.WriteRemoteObject(producer->AsObject()); + + int error = Remote()->SendRequest( + static_cast(StreamCaptureInterfaceCode::CAMERA_STREAM_SET_RAW_PHOTO_INFO), data, reply, option); + if (error != ERR_NONE) { + MEDIA_ERR_LOG("HStreamCaptureProxy SetRawPhotoStreamInfo failed, error: %{public}d", error); + } + return error; +} + int32_t HStreamCaptureProxy::DeferImageDeliveryFor(int32_t type) { MessageParcel data; diff --git a/services/camera_service/binder/server/include/hstream_capture_stub.h b/services/camera_service/binder/server/include/hstream_capture_stub.h index 496b51180..475e45ece 100644 --- a/services/camera_service/binder/server/include/hstream_capture_stub.h +++ b/services/camera_service/binder/server/include/hstream_capture_stub.h @@ -32,6 +32,7 @@ public: int32_t HandleCapture(MessageParcel& data); int32_t HandleSetCallback(MessageParcel& data); int32_t HandleSetThumbnail(MessageParcel& 
data); + int32_t HandleSetRawPhotoInfo(MessageParcel& data); + int32_t HandleEnableDeferredType(MessageParcel& data); }; } // namespace CameraStandard diff --git a/services/camera_service/binder/server/src/hstream_capture_stub.cpp b/services/camera_service/binder/server/src/hstream_capture_stub.cpp index 33b68ede3..e05f8a2cc 100644 --- a/services/camera_service/binder/server/src/hstream_capture_stub.cpp +++ b/services/camera_service/binder/server/src/hstream_capture_stub.cpp @@ -58,6 +58,9 @@ int HStreamCaptureStub::OnRemoteRequest( case static_cast(StreamCaptureInterfaceCode::CAMERA_STREAM_GET_DEFERRED_VIDEO): errCode = IsDeferredVideoEnabled(); break; + case static_cast(StreamCaptureInterfaceCode::CAMERA_STREAM_SET_RAW_PHOTO_INFO): + errCode = HandleSetRawPhotoInfo(data); + break; default: MEDIA_ERR_LOG("HStreamCaptureStub request code %{public}u not handled", code); errCode = IPCObjectStub::OnRemoteRequest(code, data, reply, option); @@ -89,6 +92,17 @@ int32_t HStreamCaptureStub::HandleSetThumbnail(MessageParcel &data) return ret; } +int32_t HStreamCaptureStub::HandleSetRawPhotoInfo(MessageParcel &data) +{ + sptr remoteObj = data.ReadRemoteObject(); + CHECK_AND_RETURN_RET_LOG(remoteObj != nullptr, IPC_STUB_INVALID_DATA_ERR, + "HStreamCaptureStub HandleSetRawPhotoInfo BufferProducer is null"); + sptr producer = iface_cast(remoteObj); + int32_t ret = SetRawPhotoStreamInfo(producer); + MEDIA_DEBUG_LOG("HStreamCaptureStub HandleSetRawPhotoInfo result: %{public}d", ret); + return ret; +} + int32_t HStreamCaptureStub::HandleEnableDeferredType(MessageParcel &data) { int32_t type = data.ReadInt32(); diff --git a/services/camera_service/include/hcamera_service.h b/services/camera_service/include/hcamera_service.h index 91caba674..7f4b8aac2 100644 --- a/services/camera_service/include/hcamera_service.h +++ b/services/camera_service/include/hcamera_service.h @@ -19,6 +19,7 @@ #include #include #include +#include #include "camera_util.h" #include "hcamera_device.h" @@ -46,12
+47,15 @@ using namespace OHOS::HDI::Camera::V1_0; using namespace DeferredProcessing; struct CameraMetaInfo { string cameraId; + uint8_t cameraType; uint8_t position; uint8_t connectionType; + std::vector supportModes; shared_ptr cameraAbility; - CameraMetaInfo(string cameraId, uint8_t position, uint8_t connectionType, - shared_ptr cameraAbility) - : cameraId(cameraId), position(position), connectionType (connectionType), cameraAbility(cameraAbility) {} + CameraMetaInfo(string cameraId, uint8_t cameraType, uint8_t position, uint8_t connectionType, + std::vector supportModes, shared_ptr cameraAbility) + : cameraId(cameraId), cameraType(cameraType), position(position), + connectionType(connectionType), supportModes(supportModes), cameraAbility(cameraAbility) {} }; class HCameraService : public SystemAbility, public HCameraServiceStub, public HCameraHostManager::StatusCallback { DECLARE_SYSTEM_ABILITY(HCameraService); @@ -157,6 +161,8 @@ private: void CameraDumpPrelaunch(common_metadata_header_t* metadataEntry, string& dumpString); void CameraDumpThumbnail(common_metadata_header_t* metadataEntry, string& dumpString); vector> ChooseDeFaultCameras(vector> cameraInfos); + vector> ChoosePhysicalCameras(const vector>& cameraInfos, + const vector>& choosedCameras); bool IsCameraMuteSupported(string cameraId); bool IsPrelaunchSupported(string cameraId); int32_t UpdateMuteSetting(sptr cameraDevice, bool muteMode); diff --git a/services/camera_service/include/hstream_capture.h b/services/camera_service/include/hstream_capture.h index 061388b23..23acd28da 100644 --- a/services/camera_service/include/hstream_capture.h +++ b/services/camera_service/include/hstream_capture.h @@ -40,6 +40,7 @@ public: std::shared_ptr cameraAbility) override; void SetStreamInfo(StreamInfo_V1_1 &streamInfo) override; int32_t SetThumbnail(bool isEnabled, const sptr &producer) override; + int32_t SetRawPhotoStreamInfo(const sptr &producer) override; int32_t DeferImageDeliveryFor(int32_t type) 
override; int32_t Capture(const std::shared_ptr &captureSettings) override; int32_t CancelCapture() override; @@ -71,6 +72,7 @@ private: std::mutex callbackLock_; int32_t thumbnailSwitch_; sptr thumbnailBufferQueue_; + sptr rawBufferQueue_; int32_t modeName_; int32_t deferredPhotoSwitch_; int32_t deferredVideoSwitch_; diff --git a/services/camera_service/src/hcamera_service.cpp b/services/camera_service/src/hcamera_service.cpp index 8447d31ed..877a98d9a 100644 --- a/services/camera_service/src/hcamera_service.cpp +++ b/services/camera_service/src/hcamera_service.cpp @@ -21,6 +21,7 @@ #include #include #include +#include #include "access_token.h" #include "accesstoken_kit.h" @@ -144,10 +145,16 @@ int32_t HCameraService::GetCameras( ret = OHOS::Camera::FindCameraMetadataItem(metadata, OHOS_CONTROL_CAPTURE_MIRROR_SUPPORTED, &item); bool isMirrorSupported = (ret == CAM_META_SUCCESS) ? ((item.data.u8[0] == 1) || (item.data.u8[0] == 0)) : false; + ret = OHOS::Camera::FindCameraMetadataItem(metadata, OHOS_ABILITY_CAMERA_MODES, &item); + std::vector supportModes = {}; + for (uint32_t i = 0; i < item.count; i++) { + supportModes.push_back(item.data.u8[i]); + } CAMERA_SYSEVENT_STATISTIC(CreateMsg("CameraManager GetCameras camera ID:%s, Camera position:%d," " Camera Type:%d, Connection Type:%d, Mirror support:%d", id.c_str(), cameraPosition, cameraType, connectionType, isMirrorSupported)); - cameraInfos.emplace_back(make_shared(id, cameraPosition, connectionType, cameraAbility)); + cameraInfos.emplace_back(make_shared(id, cameraType, cameraPosition, + connectionType, supportModes, cameraAbility)); } FillCameras(cameraInfos, cameraIds, cameraAbilityList); return ret; @@ -157,12 +164,50 @@ void HCameraService::FillCameras(vector>& cameraInfos vector& cameraIds, vector>& cameraAbilityList) { vector> choosedCameras = ChooseDeFaultCameras(cameraInfos); + vector> physicalCameras = ChoosePhysicalCameras(cameraInfos, choosedCameras); cameraIds.clear(); cameraAbilityList.clear(); for 
(const auto& camera: choosedCameras) { cameraIds.emplace_back(camera->cameraId); cameraAbilityList.emplace_back(camera->cameraAbility); } + for (const auto& camera: physicalCameras) { + cameraIds.emplace_back(camera->cameraId); + cameraAbilityList.emplace_back(camera->cameraAbility); + } +} + +vector> HCameraService::ChoosePhysicalCameras( + const vector>& cameraInfos, const vector>& choosedCameras) +{ + std::vector supportedPhysicalCamerasModes = { + OHOS::HDI::Camera::V1_3::OperationMode::PROFESSIONAL_PHOTO, + OHOS::HDI::Camera::V1_3::OperationMode::PROFESSIONAL_VIDEO, + }; + vector> physicalCameras; + for (auto& camera : cameraInfos) { + if (std::any_of(choosedCameras.begin(), choosedCameras.end(), [camera](const auto& defaultCamera) { + return camera->cameraId == defaultCamera->cameraId; + }) + ) { + MEDIA_INFO_LOG("ChoosePhysicalCameras already has default camera"); + } else { + MEDIA_INFO_LOG("ChoosePhysicalCameras camera ID:%s, CameraType: %{public}d, Camera position:%{public}d, " + "Connection Type:%{public}d", + camera->cameraId.c_str(), camera->cameraType, camera->position, camera->connectionType); + + bool isSupportPhysicalCamera = std::any_of(camera->supportModes.begin(), camera->supportModes.end(), + [&supportedPhysicalCamerasModes](auto mode) -> bool { + return any_of(supportedPhysicalCamerasModes.begin(), supportedPhysicalCamerasModes.end(), + [mode](auto it)-> bool { return it == mode; }); + }); + if (camera->cameraType != camera_type_enum_t::OHOS_CAMERA_TYPE_UNSPECIFIED && isSupportPhysicalCamera) { + physicalCameras.emplace_back(camera); + MEDIA_INFO_LOG("ChoosePhysicalCameras add camera ID:%{public}s", camera->cameraId.c_str()); + } + } + } + return physicalCameras; } vector> HCameraService::ChooseDeFaultCameras(vector> cameraInfos) @@ -1356,7 +1401,7 @@ std::shared_ptr HCameraService::CreateDefaultSetti uint8_t stabilizationMode_ = item.data.u8[0]; defaultSettings->addEntry(OHOS_CONTROL_VIDEO_STABILIZATION_MODE, &stabilizationMode_, count); } -
+ ret = OHOS::Camera::FindCameraMetadataItem(currentSetting->get(), OHOS_CONTROL_DEFERRED_IMAGE_DELIVERY, &item); if (ret == CAM_META_SUCCESS) { uint8_t deferredType = item.data.u8[0]; diff --git a/services/camera_service/src/hstream_capture.cpp b/services/camera_service/src/hstream_capture.cpp index bdf398f85..464c790ea 100644 --- a/services/camera_service/src/hstream_capture.cpp +++ b/services/camera_service/src/hstream_capture.cpp @@ -60,13 +60,23 @@ void HStreamCapture::SetStreamInfo(StreamInfo_V1_1 &streamInfo) streamInfo.v1_0.intent_ = STILL_CAPTURE; streamInfo.v1_0.encodeType_ = ENCODE_TYPE_JPEG; HDI::Camera::V1_1::ExtendedStreamInfo extendedStreamInfo; - extendedStreamInfo.type = HDI::Camera::V1_1::EXTENDED_STREAM_INFO_QUICK_THUMBNAIL; - extendedStreamInfo.bufferQueue = thumbnailBufferQueue_; // quickThumbnial do not need these param extendedStreamInfo.width = 0; extendedStreamInfo.height = 0; extendedStreamInfo.format = 0; extendedStreamInfo.dataspace = 0; + if (format_ == OHOS_CAMERA_FORMAT_DNG) { + MEDIA_INFO_LOG("HStreamCapture::SetStreamInfo Set DNG info, streamId:%{public}d", GetFwkStreamId()); + extendedStreamInfo.type = + static_cast(HDI::Camera::V1_3::EXTENDED_STREAM_INFO_RAW); + extendedStreamInfo.bufferQueue = rawBufferQueue_; + extendedStreamInfo.width = width_; + extendedStreamInfo.height = height_; + extendedStreamInfo.format = format_; + } else { + extendedStreamInfo.type = HDI::Camera::V1_1::EXTENDED_STREAM_INFO_QUICK_THUMBNAIL; + extendedStreamInfo.bufferQueue = thumbnailBufferQueue_; + } streamInfo.extendedStreamInfos = {extendedStreamInfo}; } @@ -83,6 +93,18 @@ int32_t HStreamCapture::SetThumbnail(bool isEnabled, const sptr &producer) +{ + if (producer != nullptr) { + rawBufferQueue_ = new BufferProducerSequenceable(producer); + } else { + rawBufferQueue_ = nullptr; + } + MEDIA_DEBUG_LOG("HStreamCapture::SetRawPhotoStreamInfo rawBufferQueue whether is nullptr: %{public}d", + rawBufferQueue_ == nullptr); + return CAMERA_OK; +} + int32_t 
HStreamCapture::DeferImageDeliveryFor(int32_t type) { MEDIA_INFO_LOG("HStreamCapture::DeferImageDeliveryFor type: %{public}d", type); @@ -165,7 +187,7 @@ int32_t HStreamCapture::Capture(const std::shared_ptr(item.data.u8[0]); } } - + int32_t NightMode = 4; if (GetMode() == NightMode && cameraPosition == OHOS_CAMERA_POSITION_BACK) { return ret; -- Gitee From 23e5ca7794578637370898f30e05dbd485dad1cc Mon Sep 17 00:00:00 2001 From: lvxiaoqiang Date: Wed, 24 Apr 2024 18:43:04 +0800 Subject: [PATCH 2/8] fix code check and reviews Signed-off-by: lvxiaoqiang Change-Id: I9da0ce3a6cfd3296a1ad3bed0eacab1ed216954a --- .../src/input/camera_manager_napi.cpp | 12 ++++---- .../src/mode/profession_session_napi.cpp | 4 +-- .../src/output/photo_output_napi.cpp | 6 +++- .../src/session/camera_session_napi.cpp | 4 +-- .../camera/src/session/profession_session.cpp | 6 ++-- .../src/camera_framework_moduletest.cpp | 11 +++---- .../include/session/capture_scene_const.h | 12 +++++--- interfaces/inner_api/native/test/BUILD.gn | 30 +++++++++++++++---- .../native/test/camera_capture_profession.cpp | 2 -- .../include/mode/profession_session_napi.h | 6 ++-- 10 files changed, 60 insertions(+), 33 deletions(-) diff --git a/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp b/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp index 01309a1af..917beb008 100644 --- a/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp +++ b/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp @@ -502,24 +502,24 @@ napi_value CameraManagerNapi::CreateSessionInstance(napi_env env, napi_callback_ napi_get_value_int32(env, argv[PARAM0], &jsModeName); MEDIA_INFO_LOG("CameraManagerNapi::CreateSessionInstance mode = %{public}d", jsModeName); switch (jsModeName) { - case SceneMode::CAPTURE: + case JsSceneMode::JS_CAPTURE: result = CameraNapiSecurity::CheckSystemApp(env, false) ? 
PhotoSessionForSysNapi::CreateCameraSession(env) : PhotoSessionNapi::CreateCameraSession(env); break; - case SceneMode::VIDEO: + case JsSceneMode::JS_VIDEO: result = CameraNapiSecurity::CheckSystemApp(env, false) ? VideoSessionForSysNapi::CreateCameraSession(env) : VideoSessionNapi::CreateCameraSession(env); break; - case SceneMode::PORTRAIT: + case JsSceneMode::JS_PORTRAIT: result = PortraitSessionNapi::CreateCameraSession(env); break; - case SceneMode::NIGHT: + case JsSceneMode::JS_NIGHT: result = NightSessionNapi::CreateCameraSession(env); break; - case SceneMode::PROFESSIONAL_PHOTO: + case JsSceneMode::JS_PROFESSIONAL_PHOTO: result = ProfessionSessionNapi::CreateCameraSession(env, SceneMode::PROFESSIONAL_PHOTO); break; - case SceneMode::PROFESSIONAL_VIDEO: + case JsSceneMode::JS_PROFESSIONAL_VIDEO: result = ProfessionSessionNapi::CreateCameraSession(env, SceneMode::PROFESSIONAL_VIDEO); break; default: diff --git a/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp b/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp index 9f21d56a4..3a9f14323 100644 --- a/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp +++ b/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2022 Huawei Device Co., Ltd. + * Copyright (c) 2024-2024 Huawei Device Co., Ltd. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at @@ -197,7 +197,7 @@ napi_value ProfessionSessionNapi::ProfessionSessionNapiConstructor(napi_env env, ProfessionSessionNapi::ProfessionSessionNapiDestructor, nullptr, nullptr); if (status == napi_ok) { obj.release(); - return thisVar; + return thisVar; } else { MEDIA_ERR_LOG("ProfessionSessionNapi Failure wrapping js to native napi"); } diff --git a/frameworks/js/camera_napi/src/output/photo_output_napi.cpp b/frameworks/js/camera_napi/src/output/photo_output_napi.cpp index bbd97c65e..481d992a5 100644 --- a/frameworks/js/camera_napi/src/output/photo_output_napi.cpp +++ b/frameworks/js/camera_napi/src/output/photo_output_napi.cpp @@ -1891,8 +1891,12 @@ void PhotoOutputNapi::RegisterPhotoAvailableCallbackListener( photoListener_ = photoListener; } photoListener_->SaveCallbackReference(CONST_CAPTURE_PHOTO_AVAILABLE, callback); - if (rawPhotoListener_ == nullptr && profile_.GetCameraFormat() == CAMERA_FORMAT_DNG) { + if (photoOutput_ != nullptr && rawPhotoListener_ == nullptr && profile_.GetCameraFormat() == CAMERA_FORMAT_DNG) { MEDIA_INFO_LOG("new rawPhotoListener and register surface consumer listener"); + if (photoOutput_->rawPhotoSurface_ == nullptr) { + MEDIA_ERR_LOG("rawPhotoSurface_ is null!"); + return; + } sptr rawPhotoListener = new (std::nothrow) RawPhotoListener(env, photoOutput_->rawPhotoSurface_); SurfaceError ret = photoOutput_->rawPhotoSurface_->RegisterConsumerListener( diff --git a/frameworks/js/camera_napi/src/session/camera_session_napi.cpp b/frameworks/js/camera_napi/src/session/camera_session_napi.cpp index c35acb4d8..89e20765b 100644 --- a/frameworks/js/camera_napi/src/session/camera_session_napi.cpp +++ b/frameworks/js/camera_napi/src/session/camera_session_napi.cpp @@ -99,8 +99,8 @@ const std::vector CameraSessionNapi::focus_props = { }; const std::vector CameraSessionNapi::manual_focus_props = { - DECLARE_NAPI_FUNCTION("getFocusDistance", CameraSessionNapi::GetFocusDistance), - 
DECLARE_NAPI_FUNCTION("setFocusDistance", CameraSessionNapi::SetFocusDistance), + DECLARE_NAPI_FUNCTION("getFocusDistance", CameraSessionNapi::GetFocusDistance), + DECLARE_NAPI_FUNCTION("setFocusDistance", CameraSessionNapi::SetFocusDistance), }; const std::vector CameraSessionNapi::zoom_props = { diff --git a/frameworks/native/camera/src/session/profession_session.cpp b/frameworks/native/camera/src/session/profession_session.cpp index 8a6021604..9e4abfa7e 100644 --- a/frameworks/native/camera/src/session/profession_session.cpp +++ b/frameworks/native/camera/src/session/profession_session.cpp @@ -373,13 +373,13 @@ int32_t ProfessionSession::GetSensorExposureTimeRange(std::vector &sen for (uint32_t i = 0; i < item.count; i++) { numerator = item.data.r[i].numerator; denominator = item.data.r[i].denominator; - value = numerator / (denominator / timeUnit); - MEDIA_DEBUG_LOG("ProfessionSession::GetSensorExposureTimeRange numerator=%{public}d, denominator=%{public}d," - " value=%{public}d", numerator, denominator, value); if (denominator == 0) { MEDIA_ERR_LOG("ProfessionSession::GetSensorExposureTimeRange divide by 0! 
numerator=%{public}d", numerator); return CameraErrorCode::INVALID_ARGUMENT; } + value = numerator / (denominator / timeUnit); + MEDIA_DEBUG_LOG("ProfessionSession::GetSensorExposureTimeRange numerator=%{public}d, denominator=%{public}d," + " value=%{public}d", numerator, denominator, value); sensorExposureTimeRange.emplace_back(value); } MEDIA_INFO_LOG("ProfessionSessionNapi::GetSensorExposureTimeRange range=%{public}s, len = %{public}zu", diff --git a/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp b/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp index 695e07241..5c5c72e51 100644 --- a/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp +++ b/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp @@ -636,8 +636,9 @@ SelectProfiles CameraFrameworkModuleTest::SelectWantedProfiles( sptr& modeAbility, const SelectProfiles wanted) { SelectProfiles ret; - const auto& preview = std::find_if(modeAbility->GetPreviewProfiles().begin(), modeAbility->GetPreviewProfiles().end(), - [&wanted](auto& profile) { return profile == wanted.preview; }); + const auto& preview = std::find_if(modeAbility->GetPreviewProfiles().begin(), + modeAbility->GetPreviewProfiles().end(), + [&wanted](auto& profile) { return profile == wanted.preview; }); if (preview != modeAbility->GetPreviewProfiles().end()) { ret.preview = *preview; } @@ -3237,12 +3238,12 @@ HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_profession_075, ASSERT_NE(modeAbility, nullptr); SelectProfiles wanted; - wanted.preview.size_ = {640,480}; + wanted.preview.size_ = {640, 480}; wanted.preview.format_ = CAMERA_FORMAT_RGBA_8888; - wanted.video.size_ = {640,480}; + wanted.video.size_ = {640, 480}; wanted.video.format_ = CAMERA_FORMAT_RGBA_8888; - wanted.video.framerates_ = {30,30}; + wanted.video.framerates_ = {30, 30}; SelectProfiles profiles = SelectWantedProfiles(modeAbility, wanted); 
ASSERT_NE(profiles.preview.format_, -1); diff --git a/interfaces/inner_api/native/camera/include/session/capture_scene_const.h b/interfaces/inner_api/native/camera/include/session/capture_scene_const.h index a7af2ca81..c4fccded0 100644 --- a/interfaces/inner_api/native/camera/include/session/capture_scene_const.h +++ b/interfaces/inner_api/native/camera/include/session/capture_scene_const.h @@ -28,9 +28,12 @@ enum JsSceneMode : int32_t { JS_VIDEO = 2, JS_PORTRAIT = 3, JS_NIGHT = 4, + JS_PROFESSIONAL_PHOTO = 5, + JS_PROFESSIONAL_VIDEO = 6, JS_SLOW_MOTION = 7, - JS_PROFESSIONAL_PHOTO = 11, - JS_PROFESSIONAL_VIDEO = 12, + JS_CAPTURE_MARCO = 8, + JS_VIDEO_MARCO = 9, + JS_HIGH_RES_PHOTO = 11, }; enum SceneMode : int32_t { @@ -40,13 +43,14 @@ enum SceneMode : int32_t { PORTRAIT = 3, NIGHT = 4, PROFESSIONAL = 5, - SLOW_MOTION = 6, SCAN = 7, CAPTURE_MACRO = 8, VIDEO_MACRO = 9, + SLOW_MOTION = 10, PROFESSIONAL_PHOTO = 11, PROFESSIONAL_VIDEO = 12, - HIGH_FRAME_RATE = 13 + HIGH_FRAME_RATE = 13, + HIGH_RES_PHOTO = 14, }; enum SceneFeature : int32_t { diff --git a/interfaces/inner_api/native/test/BUILD.gn b/interfaces/inner_api/native/test/BUILD.gn index 955f162c7..b3a2506bb 100644 --- a/interfaces/inner_api/native/test/BUILD.gn +++ b/interfaces/inner_api/native/test/BUILD.gn @@ -51,7 +51,11 @@ ohos_executable("camera_video") { ] cflags += [ "-Wall" ] cflags_cc = cflags - + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } configs = [ ":camera_config" ] deps = [ "${multimedia_camera_framework_path}/frameworks/native/camera:camera_framework" ] @@ -89,7 +93,11 @@ ohos_executable("camera_capture") { ] cflags += [ "-Wall" ] cflags_cc = cflags - + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } configs = [ ":camera_config" ] deps = [ "${multimedia_camera_framework_path}/frameworks/native/camera:camera_framework" ] @@ -124,7 +132,11 @@ ohos_executable("camera_capture_video") { ] cflags += [ "-Wall" ] cflags_cc = cflags - + sanitize = { + cfi = 
true + cfi_cross_dso = true + debug = false + } configs = [ ":camera_config" ] deps = [ "${multimedia_camera_framework_path}/frameworks/native/camera:camera_framework" ] @@ -159,7 +171,11 @@ ohos_executable("camera_capture_mode") { ] cflags += [ "-Wall" ] cflags_cc = cflags - + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } configs = [ ":camera_config" ] deps = [ "${multimedia_camera_framework_path}/frameworks/native/camera:camera_framework" ] @@ -192,7 +208,11 @@ ohos_executable("camera_capture_profession") { "-fPIC", "-g", ] - + sanitize = { + cfi = true + cfi_cross_dso = true + debug = false + } cflags += [ "-Wall" ] cflags_cc = cflags diff --git a/interfaces/inner_api/native/test/camera_capture_profession.cpp b/interfaces/inner_api/native/test/camera_capture_profession.cpp index 05d359f37..25d852938 100644 --- a/interfaces/inner_api/native/test/camera_capture_profession.cpp +++ b/interfaces/inner_api/native/test/camera_capture_profession.cpp @@ -42,8 +42,6 @@ std::map g_abilityIdStr_ = { int main(int argc, char **argv) { cout<<"-----------------version:20240322-----------------"< Date: Wed, 24 Apr 2024 20:34:58 +0800 Subject: [PATCH 3/8] remove native demo Signed-off-by: lvxiaoqiang Change-Id: I5cdf1ca1dca70e7655f4f5fab98e1f94e2d61444 --- .../src/input/camera_manager_napi.cpp | 2 +- .../camera/src/session/capture_session.cpp | 8 +- interfaces/inner_api/native/test/BUILD.gn | 64 +-- .../native/test/camera_capture_profession.cpp | 380 ------------------ 4 files changed, 9 insertions(+), 445 deletions(-) delete mode 100644 interfaces/inner_api/native/test/camera_capture_profession.cpp diff --git a/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp b/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp index 917beb008..4ddc8e614 100644 --- a/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp +++ b/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp @@ -886,7 +886,7 @@ napi_value 
CameraManagerNapi::GetSupportedCameras(napi_env env, napi_callback_in if (status == napi_ok && cameraManagerNapi != nullptr) { std::vector> cameraObjList = cameraManagerNapi->cameraManager_->GetSupportedCameras(); std::vector> selectedCameraList; - if (!CameraNapiSecurity::CheckSystemApp(env)) { + if (!CameraNapiSecurity::CheckSystemApp(env, false)) { std::copy_if(cameraObjList.begin(), cameraObjList.end(), std::back_inserter(selectedCameraList), [](const auto& it) { return it->GetCameraType() == CAMERA_TYPE_UNSUPPORTED || it->GetCameraType() == CAMERA_TYPE_DEFAULT; diff --git a/frameworks/native/camera/src/session/capture_session.cpp b/frameworks/native/camera/src/session/capture_session.cpp index 5efd4deff..b5b886a31 100644 --- a/frameworks/native/camera/src/session/capture_session.cpp +++ b/frameworks/native/camera/src/session/capture_session.cpp @@ -3012,7 +3012,7 @@ float CaptureSession::GetMinimumFocusDistance() int32_t CaptureSession::GetFocusDistance(float& focusDistance) { - focusDistance = 0; + focusDistance = 0.0; if (!IsSessionCommited()) { MEDIA_ERR_LOG("CaptureSession::GetFocusDistance Session is not Commited"); return CameraErrorCode::SESSION_NOT_CONFIG; @@ -3033,7 +3033,7 @@ int32_t CaptureSession::GetFocusDistance(float& focusDistance) MEDIA_ERR_LOG("CaptureSession::GetFocusDistance minimum distance is 0"); return CameraErrorCode::SUCCESS; } - focusDistance = 1- (item.data.f[0] / GetMinimumFocusDistance()); + focusDistance = 1.0 - (item.data.f[0] / GetMinimumFocusDistance()); MEDIA_DEBUG_LOG("CaptureSession::GetFocusDistance focusDistance = %{public}f", focusDistance); return CameraErrorCode::SUCCESS; } @@ -3056,9 +3056,9 @@ int32_t CaptureSession::SetFocusDistance(float focusDistance) MEDIA_DEBUG_LOG("CaptureSession::GetFocusDistance app set focusDistance = %{public}f", focusDistance); camera_metadata_item_t item; if (focusDistance < 0) { - focusDistance = 0; + focusDistance = 0.0; } else if (focusDistance > 1) { - focusDistance = 1; + 
focusDistance = 1.0; } float value = (1 - focusDistance) * GetMinimumFocusDistance(); MEDIA_DEBUG_LOG("CaptureSession::GetFocusDistance meta set focusDistance = %{public}f", value); diff --git a/interfaces/inner_api/native/test/BUILD.gn b/interfaces/inner_api/native/test/BUILD.gn index b3a2506bb..8d909242e 100644 --- a/interfaces/inner_api/native/test/BUILD.gn +++ b/interfaces/inner_api/native/test/BUILD.gn @@ -51,11 +51,7 @@ ohos_executable("camera_video") { ] cflags += [ "-Wall" ] cflags_cc = cflags - sanitize = { - cfi = true - cfi_cross_dso = true - debug = false - } + configs = [ ":camera_config" ] deps = [ "${multimedia_camera_framework_path}/frameworks/native/camera:camera_framework" ] @@ -93,11 +89,7 @@ ohos_executable("camera_capture") { ] cflags += [ "-Wall" ] cflags_cc = cflags - sanitize = { - cfi = true - cfi_cross_dso = true - debug = false - } + configs = [ ":camera_config" ] deps = [ "${multimedia_camera_framework_path}/frameworks/native/camera:camera_framework" ] @@ -132,11 +124,7 @@ ohos_executable("camera_capture_video") { ] cflags += [ "-Wall" ] cflags_cc = cflags - sanitize = { - cfi = true - cfi_cross_dso = true - debug = false - } + configs = [ ":camera_config" ] deps = [ "${multimedia_camera_framework_path}/frameworks/native/camera:camera_framework" ] @@ -171,50 +159,6 @@ ohos_executable("camera_capture_mode") { ] cflags += [ "-Wall" ] cflags_cc = cflags - sanitize = { - cfi = true - cfi_cross_dso = true - debug = false - } - configs = [ ":camera_config" ] - - deps = [ "${multimedia_camera_framework_path}/frameworks/native/camera:camera_framework" ] - - external_deps = [ - "access_token:libaccesstoken_sdk", - "access_token:libnativetoken", - "access_token:libtoken_setproc", - "c_utils:utils", - "drivers_interface_camera:libcamera_proxy_1.0", - "drivers_interface_camera:libcamera_proxy_1.1", - "drivers_interface_camera:metadata", - "graphic_surface:surface", - "hilog:libhilog", - "hisysevent:libhisysevent", - "hitrace:hitrace_meter", - 
"ipc:ipc_core", - ] - part_name = "camera_framework" - subsystem_name = "multimedia" -} - -ohos_executable("camera_capture_profession") { - install_enable = false - sources = [ - "camera_capture_profession.cpp", - "test_common.cpp", - ] - cflags = [ - "-fPIC", - "-g", - ] - sanitize = { - cfi = true - cfi_cross_dso = true - debug = false - } - cflags += [ "-Wall" ] - cflags_cc = cflags configs = [ ":camera_config" ] @@ -236,4 +180,4 @@ ohos_executable("camera_capture_profession") { ] part_name = "camera_framework" subsystem_name = "multimedia" -} +} \ No newline at end of file diff --git a/interfaces/inner_api/native/test/camera_capture_profession.cpp b/interfaces/inner_api/native/test/camera_capture_profession.cpp deleted file mode 100644 index 25d852938..000000000 --- a/interfaces/inner_api/native/test/camera_capture_profession.cpp +++ /dev/null @@ -1,380 +0,0 @@ -/* - * Copyright (c) 2023-2023 Huawei Device Co., Ltd. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include -#include -#include -#include "input/camera_input.h" -#include "input/camera_manager.h" -#include "output/camera_output_capability.h" - -#include "camera_log.h" -#include "surface.h" -#include "test_common.h" - -#include "ipc_skeleton.h" -#include "access_token.h" -#include "hap_token_info.h" -#include "accesstoken_kit.h" -#include "nativetoken_kit.h" -#include "token_setproc.h" -using namespace std; -using namespace OHOS; -using namespace OHOS::CameraStandard; - -std::map g_abilityIdStr_ = { - {536870912, "OHOS_ABILITY_SCENE_FILTER_TYPES"}, - {536870914, "OHOS_ABILITY_SCENE_PORTRAIT_EFFECT_TYPES"}, - {536870916, "OHOS_ABILITY_SCENE_BEAUTY_TYPES"} -}; -int main(int argc, char **argv) -{ - cout<<"-----------------version:20240322-----------------"< camManagerObj = CameraManager::GetInstance(); - MEDIA_INFO_LOG("Setting callback to listen camera status and flash status"); - camManagerObj->SetCallback(std::make_shared(testName)); - std::vector> cameraObjList = camManagerObj->GetSupportedCameras(); - if (cameraObjList.size() == 0) { - return 0; - } - sptr device = cameraObjList[0]; - MEDIA_INFO_LOG("Camera ID count: %{public}zu", cameraObjList.size()); - for (auto& it : cameraObjList) { - MEDIA_INFO_LOG("Camera ID: %{public}s", it->GetID().c_str()); - MEDIA_INFO_LOG("Camera Posion: %{public}d", it->GetPosition()); - if (devicePosion == 0 && it->GetPosition() == CameraPosition::CAMERA_POSITION_BACK) { - device = it; - break; - } - if (devicePosion == 1 && it->GetPosition() == CameraPosition::CAMERA_POSITION_FRONT) { - device = it; - break; - } - } - cout<<"Camera ID ="<GetID()<<",camera Position = "<GetPosition()< supportedModes = camManagerObj->GetSupportedModes(device); - std::string modes = ""; - for (auto mode : supportedModes) { - modes += std::to_string(static_cast(mode)) + " , "; - } - MEDIA_INFO_LOG("supportedModes : %{public}s", modes.c_str()); - sptr captureSession = camManagerObj->CreateCaptureSession(SceneMode::PROFESSIONAL_VIDEO); - sptr 
professionSession = nullptr; - professionSession = static_cast (captureSession.GetRefPtr()); - if (professionSession == nullptr) { - return 0; - } - professionSession->BeginConfig(); - sptr captureInput = camManagerObj->CreateCameraInput(device); - if (captureInput == nullptr) { - return 0; - } - - sptr cameraInput = (sptr &)captureInput; - cameraInput->Open(); - std::vector photoProfiles = {}; - std::vector previewProfiles = {}; - std::vector videoProfiles = {}; - string abilityIds = ""; - if (isResolutionConfigured) { - std::vector previewFormats; - std::vector photoFormats; - std::vector videoFormats; - std::vector previewSizes; - std::vector photoSizes; - std::vector videoSizes; - sptr outputcapability = - camManagerObj->GetSupportedOutputCapability(device, SceneMode::PROFESSIONAL_VIDEO); - previewProfiles = outputcapability->GetPreviewProfiles(); - uint32_t profileIndex = 0; - for (auto i : previewProfiles) { - previewFormats.push_back(i.GetCameraFormat()); - previewSizes.push_back(i.GetSize()); - - abilityIds = ""; - for (auto id : i.GetAbilityId()) { - abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; - } - MEDIA_INFO_LOG("index(%{public}d), preview profile f(%{public}d), w(%{public}d), h(%{public}d) " - "support ability: %{public}s", profileIndex++, - i.GetCameraFormat(), i.GetSize().width, i.GetSize().height, abilityIds.c_str()); - } - - photoProfiles = outputcapability->GetPhotoProfiles(); - profileIndex = 0; - for (auto i : photoProfiles) { - photoFormats.push_back(i.GetCameraFormat()); - photoSizes.push_back(i.GetSize()); - abilityIds = ""; - for (auto id : i.GetAbilityId()) { - abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; - } - MEDIA_INFO_LOG("index %{public}d, photo support format : %{public}d, width: %{public}d, height: %{public}d" - "support ability: %{public}s", profileIndex++, - i.GetCameraFormat(), i.GetSize().width, i.GetSize().height, abilityIds.c_str()); - } - - videoProfiles = 
outputcapability->GetVideoProfiles(); - profileIndex = 0; - for (auto i : videoProfiles) { - videoFormats.push_back(i.GetCameraFormat()); - videoSizes.push_back(i.GetSize()); - - abilityIds = ""; - for (auto id : i.GetAbilityId()) { - abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; - } - MEDIA_INFO_LOG("index(%{public}d), video profile f(%{public}d), w(%{public}d), h(%{public}d) " - "support ability: %{public}s", profileIndex++, - i.GetCameraFormat(), i.GetSize().width, i.GetSize().height, abilityIds.c_str()); - } - } - - MEDIA_INFO_LOG("photoCaptureCount: %{public}d", photoCaptureCount); - - cameraInput->SetErrorCallback(std::make_shared(testName)); - ret = professionSession->AddInput(captureInput); - if (ret != 0) { - return 0; - } - - sptr photoSurface = IConsumerSurface::Create(); - if (photoSurface == nullptr) { - return 0; - } - photosize.width = photoWidth; - photosize.height = photoHeight; - Profile photoprofile; - for (auto it : photoProfiles) { - if (it.GetSize().width == photosize.width && it.GetSize().height == photosize.height - && it.GetCameraFormat() == static_cast(photoFormat)) { - photoprofile = it; - } - } - abilityIds = ""; - for (auto id : photoprofile.GetAbilityId()) { - abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; - } - MEDIA_INFO_LOG("photo support format : %{public}d, width: %{public}d, height: %{public}d" - "support ability: %{public}s", - photoFormat, photoWidth, photoHeight, abilityIds.c_str()); - cout<< "photoFormat: " << photoFormat << " photoWidth: "<< photoWidth - << " photoHeight: " << photoHeight << " support ability: " << abilityIds.c_str() << endl; - - sptr captureListener = new(std::nothrow) SurfaceListener("Photo", SurfaceType::PHOTO, - photoFd, photoSurface); - photoSurface->RegisterConsumerListener((sptr &)captureListener); - - sptr metaOutput = camManagerObj->CreateMetadataOutput(); - MEDIA_INFO_LOG("Setting Meta callback"); - ((sptr 
&)metaOutput)->SetCallback(std::make_shared(testName)); - - ret = professionSession->AddOutput(metaOutput); - if (ret != 0) { - return 0; - } - - sptr videoSurface = IConsumerSurface::Create(); - if (videoSurface == nullptr) { - return 0; - } - videosize.width = videoWidth; - videosize.height = videoHeight; - VideoProfile videoprofile; - for (auto it : videoProfiles) { - if (it.GetSize().width == videosize.width && it.GetSize().height == videosize.height - && it.GetCameraFormat() == static_cast(videoFormat)) { - videoprofile = it; - } - } - abilityIds = ""; - for (auto id : videoprofile.GetAbilityId()) { - abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; - } - MEDIA_INFO_LOG("videoFormat: %{public}d, videoWidth: %{public}d, videoHeight: %{public}d" - "support ability: %{public}s", - videoFormat, videoWidth, videoHeight, abilityIds.c_str()); - - cout<< "videoFormat: " << videoFormat << " videoWidth: "<< videoWidth - << " videoHeight: " << videoHeight << " support ability: " << abilityIds.c_str() < listener = new(std::nothrow) SurfaceListener("Video", SurfaceType::VIDEO, - videoFd, videoSurface); - videoSurface->RegisterConsumerListener((sptr &)listener); - sptr videoProducer = videoSurface->GetProducer(); - sptr videoProducerSurface = Surface::CreateSurfaceAsProducer(videoProducer); - sptr videoOutput = camManagerObj->CreateVideoOutput(videoprofile, videoProducerSurface); - if (videoOutput == nullptr) { - return 0; - } - - MEDIA_INFO_LOG("Setting video callback"); - ((sptr &)videoOutput)->SetCallback(std::make_shared(testName)); - ret = professionSession->AddOutput(videoOutput); - if (ret != 0) { - return 0; - } - - sptr previewSurface = IConsumerSurface::Create(); - if (previewSurface == nullptr) { - return 0; - } - previewsize.width = previewWidth; - previewsize.height = previewHeight; - Profile previewprofile; - for (auto it : previewProfiles) { - if (it.GetSize().width == previewsize.width && it.GetSize().height == previewsize.height - && 
it.GetCameraFormat() == static_cast(previewFormat)) { - previewprofile = it; - } - } - abilityIds = ""; - for (auto id : previewprofile.GetAbilityId()) { - abilityIds += g_abilityIdStr_[id] + "("+std::to_string(id) + ") , "; - } - MEDIA_INFO_LOG("previewFormat: %{public}d, previewWidth: %{public}d, previewHeight: %{public}d" - "support ability: %{public}s", - previewFormat, previewWidth, previewHeight, abilityIds.c_str()); - - cout<< "previewFormat: " << previewFormat << " previewWidth: "<< previewWidth - << " previewHeight: " << previewHeight << " support ability: " << abilityIds.c_str() < videoListener = new(std::nothrow) SurfaceListener("Preview", SurfaceType::PREVIEW, - previewFd, previewSurface); - previewSurface->RegisterConsumerListener((sptr &)videoListener); - sptr previewProducer = previewSurface->GetProducer(); - sptr previewProducerSurface = Surface::CreateSurfaceAsProducer(previewProducer); - sptr previewOutput = camManagerObj->CreatePreviewOutput(previewprofile, previewProducerSurface); - if (previewOutput == nullptr) { - return 0; - } - - MEDIA_INFO_LOG("Setting preview callback"); - ((sptr &)previewOutput)->SetCallback(std::make_shared(testName)); - ret = professionSession->AddOutput(previewOutput); - if (ret != 0) { - return 0; - } - - ret = professionSession->CommitConfig(); - if (ret != 0) { - return 0; - } - - MEDIA_INFO_LOG("Preview started"); - ret = professionSession->Start(); - if (ret != 0) { - return 0; - } - - sleep(previewCaptureGap); - - ret = ((sptr &)videoOutput)->Start(); - if (ret != 0) { - MEDIA_ERR_LOG("Failed to start recording, result: %{public}d", ret); - return ret; - } - sleep(videoDurationGap); - ret = ((sptr &)videoOutput)->Stop(); - if (ret != 0) { - MEDIA_ERR_LOG("Failed to stop recording, result: %{public}d", ret); - return ret; - } - ret = TestUtils::SaveVideoFile(nullptr, 0, VideoSaveMode::CLOSE, videoFd); - videoFd = -1; - sleep(gapAfterCapture); - - MEDIA_INFO_LOG("Closing the session"); - ((sptr 
&)previewOutput)->Stop(); - professionSession->Stop(); - professionSession->Release(); - cameraInput->Release(); - camManagerObj->SetCallback(nullptr); - - MEDIA_INFO_LOG("Camera new sample end."); - return 0; -} -- Gitee From 7817627671791665c62d1c947901c93706c34e16 Mon Sep 17 00:00:00 2001 From: lvxiaoqiang Date: Wed, 24 Apr 2024 13:05:10 +0000 Subject: [PATCH 4/8] update interfaces/inner_api/native/test/BUILD.gn. Signed-off-by: lvxiaoqiang --- interfaces/inner_api/native/test/BUILD.gn | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/interfaces/inner_api/native/test/BUILD.gn b/interfaces/inner_api/native/test/BUILD.gn index 8d909242e..131267d92 100644 --- a/interfaces/inner_api/native/test/BUILD.gn +++ b/interfaces/inner_api/native/test/BUILD.gn @@ -180,4 +180,4 @@ ohos_executable("camera_capture_mode") { ] part_name = "camera_framework" subsystem_name = "multimedia" -} \ No newline at end of file +} -- Gitee From 6036a0bf704574285b6c79b4a8d3c074b29ca3b5 Mon Sep 17 00:00:00 2001 From: supeng Date: Wed, 24 Apr 2024 14:33:06 +0000 Subject: [PATCH 5/8] update interfaces/kits/native/include/camera/camera.h. Signed-off-by: supeng --- interfaces/kits/native/include/camera/camera.h | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/interfaces/kits/native/include/camera/camera.h b/interfaces/kits/native/include/camera/camera.h index 91a5fd220..75ddae9e5 100644 --- a/interfaces/kits/native/include/camera/camera.h +++ b/interfaces/kits/native/include/camera/camera.h @@ -599,6 +599,11 @@ typedef struct Camera_Device { * Camera connection type attribute. */ Camera_Connection connectionType; + + /** + * Camera orientation. 
+ */ + uint32_t cameraOrientation; } Camera_Device; /** -- Gitee From 92b8bb8a5fde6ea2179b961375bc2fee60f3e2ec Mon Sep 17 00:00:00 2001 From: lvxiaoqiang Date: Thu, 25 Apr 2024 17:09:11 +0800 Subject: [PATCH 6/8] fix code check Signed-off-by: lvxiaoqiang Change-Id: I3fc4a9d12774ac08eaa9b66cc5248b34f1b8059b --- .../src/input/camera_manager_napi.cpp | 34 +++----- .../camera/src/input/camera_manager.cpp | 10 +-- .../camera/src/session/portrait_session.cpp | 18 ++-- .../camera/src/session/profession_session.cpp | 84 +++---------------- .../src/utils/metadata_common_utils.cpp | 39 ++++++++- .../src/camera_framework_moduletest.cpp | 18 ++-- .../include/session/profession_session.h | 1 + .../include/utils/metadata_common_utils.h | 17 ++++ .../camera_service/src/hcamera_service.cpp | 12 ++- 9 files changed, 106 insertions(+), 127 deletions(-) diff --git a/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp b/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp index 4ddc8e614..ffa65f42d 100644 --- a/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp +++ b/frameworks/js/camera_napi/src/input/camera_manager_napi.cpp @@ -977,61 +977,51 @@ napi_value CameraManagerNapi::GetSupportedOutputCapability(napi_env env, napi_ca { MEDIA_INFO_LOG("GetSupportedOutputCapability is called"); napi_status status; - napi_value result = nullptr; size_t argc = ARGS_TWO; napi_value argv[ARGS_TWO] = {0}; napi_value thisVar = nullptr; CameraDeviceNapi* cameraDeviceNapi = nullptr; - CameraManagerNapi* cameraManagerNapi = nullptr; - CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar); - napi_get_undefined(env, &result); - status = napi_unwrap(env, thisVar, reinterpret_cast(&cameraManagerNapi)); - if (status != napi_ok || cameraManagerNapi == nullptr) { - MEDIA_ERR_LOG("napi_unwrap( ) failure!"); - return result; - } status = napi_unwrap(env, argv[PARAM0], reinterpret_cast(&cameraDeviceNapi)); if (status != napi_ok || cameraDeviceNapi == nullptr) { - MEDIA_ERR_LOG("Could 
not able to read cameraId argument!"); + MEDIA_ERR_LOG("napi_unwrap failure!"); return result; } sptr cameraInfo = cameraDeviceNapi->cameraDevice_; if (argc == ARGS_ONE) { result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo); } else if (argc == ARGS_TWO) { - int32_t sceneMode; - napi_get_value_int32(env, argv[PARAM1], &sceneMode); - MEDIA_INFO_LOG("CameraManagerNapi::GetSupportedOutputCapability mode = %{public}d", sceneMode); - switch (sceneMode) { - case SceneMode::CAPTURE: + int32_t jsSceneMode; + napi_get_value_int32(env, argv[PARAM1], &jsSceneMode); + MEDIA_INFO_LOG("CameraManagerNapi::GetSupportedOutputCapability mode = %{public}d", jsSceneMode); + switch (jsSceneMode) { + case JsSceneMode::JS_CAPTURE: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::CAPTURE); break; - case SceneMode::VIDEO: + case JsSceneMode::JS_VIDEO: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::VIDEO); break; - case SceneMode::PORTRAIT: + case JsSceneMode::JS_PORTRAIT: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::PORTRAIT); break; - case SceneMode::NIGHT: + case JsSceneMode::JS_NIGHT: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::NIGHT); break; - case SceneMode::PROFESSIONAL_PHOTO: + case JsSceneMode::JS_PROFESSIONAL_PHOTO: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::PROFESSIONAL_PHOTO); break; - case SceneMode::PROFESSIONAL_VIDEO: + case JsSceneMode::JS_PROFESSIONAL_VIDEO: result = CameraOutputCapabilityNapi::CreateCameraOutputCapability(env, cameraInfo, SceneMode::PROFESSIONAL_VIDEO); break; default: - MEDIA_ERR_LOG("CreateCameraSessionInstance mode = %{public}d not supported", sceneMode); + MEDIA_ERR_LOG("CreateCameraSessionInstance mode = %{public}d not supported", jsSceneMode); break; } } - return result; } diff --git 
a/frameworks/native/camera/src/input/camera_manager.cpp b/frameworks/native/camera/src/input/camera_manager.cpp index cb910570e..a68e81f50 100644 --- a/frameworks/native/camera/src/input/camera_manager.cpp +++ b/frameworks/native/camera/src/input/camera_manager.cpp @@ -269,8 +269,6 @@ sptr CameraManager::CreateCaptureSession(SceneMode mode) captureSession = new(std::nothrow) PortraitSession(session); break; case SceneMode::PROFESSIONAL_VIDEO: - captureSession = new(std::nothrow) ProfessionSession(session, cameraObjList); - break; case SceneMode::PROFESSIONAL_PHOTO: captureSession = new(std::nothrow) ProfessionSession(session, cameraObjList); break; @@ -1275,12 +1273,8 @@ void CameraManager::CreateProfile4StreamType(OutputCapStreamType streamType, uin Size size{static_cast(detailInfo.width), static_cast(detailInfo.height)}; Fps fps{static_cast(detailInfo.fixedFps), static_cast(detailInfo.minFps), static_cast(detailInfo.maxFps)}; - std::vector abilityId; - abilityId = detailInfo.abilityId; - std::string abilityIds = ""; - for (auto id : abilityId) { - abilityIds += std::to_string(id) + ","; - } + std::vector abilityId = detailInfo.abilityId; + std::string abilityIds = Container2String(abilityId.begin(), abilityId.end()); if (streamType == OutputCapStreamType::PREVIEW) { Profile previewProfile = Profile(format, size, fps, abilityId); MEDIA_DEBUG_LOG("preview format : %{public}d, width: %{public}d, height: %{public}d" diff --git a/frameworks/native/camera/src/session/portrait_session.cpp b/frameworks/native/camera/src/session/portrait_session.cpp index e4c9335e7..1843529f0 100644 --- a/frameworks/native/camera/src/session/portrait_session.cpp +++ b/frameworks/native/camera/src/session/portrait_session.cpp @@ -14,10 +14,11 @@ */ #include "session/portrait_session.h" +#include "camera_log.h" #include "camera_util.h" #include "hcapture_session_callback_stub.h" #include "input/camera_input.h" -#include "camera_log.h" +#include "metadata_common_utils.h" #include 
"output/photo_output.h" #include "output/preview_output.h" #include "output/video_output.h" @@ -259,7 +260,7 @@ std::vector> PortraitSession::GetSupportedPhysicalApertures() { // The data structure of the supportedPhysicalApertures object is { {zoomMin, zoomMax, // physicalAperture1, physicalAperture2ยทยทยท}, }. - std::vector> supportedPhysicalApertures; + std::vector> supportedPhysicalApertures = {}; if (!IsSessionCommited()) { MEDIA_ERR_LOG("GetSupportedPhysicalApertures Session is not Commited"); return supportedPhysicalApertures; @@ -279,18 +280,21 @@ std::vector> PortraitSession::GetSupportedPhysicalApertures() MEDIA_ERR_LOG("GetSupportedPhysicalApertures Failed with return code %{public}d", ret); return supportedPhysicalApertures; } - int32_t supportedDeviceCount = static_cast(item.data.f[0]); + std::vector chooseModeRange = ParsePhysicalApertureRangeByMode(item, GetMode()); + int32_t deviceCntPos = 1; + int32_t supportedDeviceCount = static_cast(chooseModeRange[deviceCntPos]); if (supportedDeviceCount == 0) { + MEDIA_ERR_LOG("GetSupportedPhysicalApertures Failed meta device count is 0"); return supportedPhysicalApertures; } std::vector tempPhysicalApertures = {}; - for (uint32_t i = 1; i < item.count; i++) { - if (static_cast(item.data.f[i]) == -1) { + for (uint32_t i = 2; i < chooseModeRange.size(); i++) { + if (chooseModeRange[i] == -1) { supportedPhysicalApertures.emplace_back(tempPhysicalApertures); vector().swap(tempPhysicalApertures); continue; } - tempPhysicalApertures.emplace_back(item.data.f[i]); + tempPhysicalApertures.emplace_back(chooseModeRange[i]); } return supportedPhysicalApertures; } @@ -337,7 +341,7 @@ void PortraitSession::SetPhysicalAperture(const float physicalAperture) int zoomMaxIndex = 1; auto it = std::find_if(physicalApertures.begin(), physicalApertures.end(), [¤tZoomRatio, &zoomMinIndex, &zoomMaxIndex](const std::vector physicalApertureRange) { - return physicalApertureRange[zoomMaxIndex] >= currentZoomRatio >= 
physicalApertureRange[zoomMinIndex]; + return physicalApertureRange[zoomMaxIndex] > currentZoomRatio >= physicalApertureRange[zoomMinIndex]; }); if (it == physicalApertures.end()) { MEDIA_ERR_LOG("current zoomRatio not supported in physical apertures zoom ratio"); diff --git a/frameworks/native/camera/src/session/profession_session.cpp b/frameworks/native/camera/src/session/profession_session.cpp index 9e4abfa7e..885ea8147 100644 --- a/frameworks/native/camera/src/session/profession_session.cpp +++ b/frameworks/native/camera/src/session/profession_session.cpp @@ -14,11 +14,12 @@ */ #include "session/profession_session.h" +#include "camera_log.h" #include "camera_metadata_operator.h" #include "camera_util.h" #include "hcapture_session_callback_stub.h" +#include "metadata_common_utils.h" #include "input/camera_input.h" -#include "camera_log.h" #include "output/photo_output.h" #include "output/preview_output.h" #include "output/video_output.h" @@ -399,9 +400,6 @@ int32_t ProfessionSession::SetSensorExposureTime(uint32_t exposureTime) "before setting camera properties"); return CameraErrorCode::SUCCESS; } - bool status = false; - int32_t count = 1; - camera_metadata_item_t item; MEDIA_DEBUG_LOG("ProfessionSession::SetSensorExposureTime exposure: %{public}d", exposureTime); if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { MEDIA_ERR_LOG("ProfessionSession::SetSensorExposureTime camera device is null"); @@ -429,13 +427,7 @@ int32_t ProfessionSession::SetSensorExposureTime(uint32_t exposureTime) } constexpr int32_t timeUnit = 1000000; camera_rational_t value = {.numerator = exposureTime, .denominator = timeUnit}; - int ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_SENSOR_EXPOSURE_TIME, &item); - if (ret == CAM_META_ITEM_NOT_FOUND) { - status = changedMetadata_->addEntry(OHOS_CONTROL_SENSOR_EXPOSURE_TIME, &value, count); - } else if (ret == CAM_META_SUCCESS) { - status = 
changedMetadata_->updateEntry(OHOS_CONTROL_SENSOR_EXPOSURE_TIME, &value, count); - } - if (!status) { + if (!AddOrUpdateMetadata(changedMetadata_, OHOS_CONTROL_SENSOR_EXPOSURE_TIME, value)) { MEDIA_ERR_LOG("ProfessionSession::SetSensorExposureTime Failed to set exposure compensation"); } exposureDurationValue_ = exposureTime; @@ -637,22 +629,12 @@ int32_t ProfessionSession::SetWhiteBalanceMode(WhiteBalanceMode mode) } else { whiteBalanceMode = itr->second; } - bool status = false; - int32_t ret; - uint32_t count = 1; - camera_metadata_item_t item; MEDIA_DEBUG_LOG("ProfessionSession::SetWhiteBalanceMode WhiteBalance mode: %{public}d", whiteBalanceMode); - ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_AWB_MODE, &item); - if (ret == CAM_META_ITEM_NOT_FOUND) { - status = changedMetadata_->addEntry(OHOS_CONTROL_AWB_MODE, &whiteBalanceMode, count); - } else if (ret == CAM_META_SUCCESS) { - status = changedMetadata_->updateEntry(OHOS_CONTROL_AWB_MODE, &whiteBalanceMode, count); - } // no manual wb mode need set maunual value to 0 if (mode != AWB_MODE_OFF) { SetManualWhiteBalance(0); } - if (!status) { + if (!AddOrUpdateMetadata(changedMetadata_, OHOS_CONTROL_AWB_MODE, whiteBalanceMode)) { MEDIA_ERR_LOG("ProfessionSession::SetWhiteBalanceMode Failed to set WhiteBalance mode"); } return CameraErrorCode::SUCCESS; @@ -740,11 +722,8 @@ int32_t ProfessionSession::SetManualWhiteBalance(int32_t wbValue) MEDIA_ERR_LOG("ProfessionSession::SetManualWhiteBalance Need to set WhiteBalanceMode off"); return CameraErrorCode::OPERATION_NOT_ALLOWED; } - bool status = false; int32_t minIndex = 0; int32_t maxIndex = 1; - int32_t count = 1; - camera_metadata_item_t item; MEDIA_DEBUG_LOG("ProfessionSession::SetManualWhiteBalance white balance: %{public}d", wbValue); if (!inputDevice_ || !inputDevice_->GetCameraDeviceInfo()) { MEDIA_ERR_LOG("ProfessionSession::SetManualWhiteBalance camera device is null"); @@ -766,14 +745,7 @@ int32_t 
ProfessionSession::SetManualWhiteBalance(int32_t wbValue) "%{public}d is greater than maximum wbValue: %{public}d", wbValue, whiteBalanceRange[maxIndex]); wbValue = whiteBalanceRange[maxIndex]; } - - int32_t ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_SENSOR_WB_VALUE, &item); - if (ret == CAM_META_ITEM_NOT_FOUND) { - status = changedMetadata_->addEntry(OHOS_CONTROL_SENSOR_WB_VALUE, &wbValue, count); - } else if (ret == CAM_META_SUCCESS) { - status = changedMetadata_->updateEntry(OHOS_CONTROL_SENSOR_WB_VALUE, &wbValue, count); - } - if (!status) { + if (!AddOrUpdateMetadata(changedMetadata_, OHOS_CONTROL_SENSOR_WB_VALUE, wbValue)) { MEDIA_ERR_LOG("SetManualWhiteBalance Failed to SetManualWhiteBalance"); } return CameraErrorCode::SUCCESS; @@ -1199,12 +1171,12 @@ int32_t ProfessionSession::SetColorEffect(ColorEffect colorEffect) } else { colorEffectTemp = itr->second; } + MEDIA_DEBUG_LOG("ProfessionSession::SetColorEffect: %{public}d", colorEffect); bool status = false; int32_t ret; uint32_t count = 1; camera_metadata_item_t item; - MEDIA_DEBUG_LOG("ProfessionSession::SetColorEffect: %{public}d", colorEffect); ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), OHOS_CONTROL_SUPPORTED_COLOR_MODES, &item); if (ret == CAM_META_ITEM_NOT_FOUND) { status = changedMetadata_->addEntry(OHOS_CONTROL_SUPPORTED_COLOR_MODES, &colorEffectTemp, count); @@ -1256,34 +1228,8 @@ int32_t ProfessionSession::GetSupportedPhysicalApertures(std::vector> modeRanges = {}; - - std::vector modeRange = {}; - - for (uint32_t i = 0; i < item.count - 1; i++) { - if (item.data.f[i] == npos && item.data.f[i + 1] == npos) { - modeRange.emplace_back(npos); - modeRanges.emplace_back(std::move(modeRange)); - modeRange.clear(); - i++; - continue; - } - modeRange.emplace_back(item.data.f[i]); - } - float currentMode = static_cast(GetMode()); - auto it = std::find_if(modeRanges.begin(), modeRanges.end(), - [currentMode](auto value) -> bool { - return currentMode == 
value[0]; - }); - if (it == modeRanges.end()) { - MEDIA_ERR_LOG("ProfessionSession::GetSupportedPhysicalApertures Failed meta not support mode:%{public}d", - GetMode()); - return CameraErrorCode::SUCCESS; - } - - auto chooseModeRange = *it; + GetMode(), Container2String(allRange.begin(), allRange.end()).c_str()); + std::vector chooseModeRange = ParsePhysicalApertureRangeByMode(item, GetMode()); int32_t deviceCntPos = 1; int32_t supportedDeviceCount = static_cast(chooseModeRange[deviceCntPos]); if (supportedDeviceCount == 0) { @@ -1366,22 +1312,14 @@ int32_t ProfessionSession::SetPhysicalAperture(float physicalAperture) MEDIA_ERR_LOG("current physicalAperture is not supported"); return CameraErrorCode::SUCCESS; } - uint32_t count = 1; - bool status = false; - camera_metadata_item_t item; - int32_t ret = Camera::FindCameraMetadataItem(changedMetadata_->get(), - OHOS_CONTROL_CAMERA_PHYSICAL_APERTURE_VALUE, &item); - if (ret == CAM_META_ITEM_NOT_FOUND) { - status = changedMetadata_->addEntry(OHOS_CONTROL_CAMERA_PHYSICAL_APERTURE_VALUE, &physicalAperture, count); - } else if (ret == CAM_META_SUCCESS) { - status = changedMetadata_->updateEntry(OHOS_CONTROL_CAMERA_PHYSICAL_APERTURE_VALUE, &physicalAperture, count); - } - if (!status) { + if (!AddOrUpdateMetadata(changedMetadata_, OHOS_CONTROL_CAMERA_PHYSICAL_APERTURE_VALUE, physicalAperture)) { MEDIA_ERR_LOG("SetPhysicalAperture Failed to set physical aperture"); + return CameraErrorCode::SUCCESS; } apertureValue_ = physicalAperture; return CameraErrorCode::SUCCESS; } + //callbacks void ProfessionSession::SetExposureInfoCallback(std::shared_ptr callback) { diff --git a/frameworks/native/camera/src/utils/metadata_common_utils.cpp b/frameworks/native/camera/src/utils/metadata_common_utils.cpp index 48ee9b948..b97899bd3 100644 --- a/frameworks/native/camera/src/utils/metadata_common_utils.cpp +++ b/frameworks/native/camera/src/utils/metadata_common_utils.cpp @@ -14,7 +14,7 @@ */ #include "metadata_common_utils.h" - +#include 
"camera_util.h" #include #include "camera_log.h" @@ -165,5 +165,42 @@ std::shared_ptr MetadataCommonUtils::CopyMetadata( } return result; } + +std::vector ParsePhysicalApertureRangeByMode(const camera_metadata_item_t &item, const int32_t modeName) +{ + const float factor = 20.0; + std::vector allRange = {}; + for (uint32_t i = 0; i < item.count; i++) { + allRange.push_back(item.data.f[i] * factor); + } + MEDIA_DEBUG_LOG("ParsePhysicalApertureRangeByMode allRange=%{public}s", + Container2String(allRange.begin(), allRange.end()).c_str()); + float npos = -1.0; + std::vector> modeRanges = {}; + std::vector modeRange = {}; + for (uint32_t i = 0; i < item.count - 1; i++) { + if (item.data.f[i] == npos && item.data.f[i + 1] == npos) { + modeRange.emplace_back(npos); + MEDIA_DEBUG_LOG("ParsePhysicalApertureRangeByMode mode %{public}d, modeRange=%{public}s", + modeName, Container2String(modeRange.begin(), modeRange.end()).c_str()); + modeRanges.emplace_back(std::move(modeRange)); + modeRange.clear(); + i++; + continue; + } + modeRange.emplace_back(item.data.f[i]); + } + float currentMode = static_cast(modeName); + auto it = std::find_if(modeRanges.begin(), modeRanges.end(), + [currentMode](auto value) -> bool { + return currentMode == value[0]; + }); + if (it == modeRanges.end()) { + MEDIA_ERR_LOG("ParsePhysicalApertureRangeByMode Failed meta not support mode:%{public}d", modeName); + return {}; + } + + return *it; +} } // namespace CameraStandard } // namespace OHOS diff --git a/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp b/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp index 5c5c72e51..3aab832b0 100644 --- a/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp +++ b/frameworks/native/camera/test/moduletest/src/camera_framework_moduletest.cpp @@ -2914,11 +2914,11 @@ HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_profession_071, ASSERT_NE(modeAbility, nullptr); 
SelectProfiles wanted; - wanted.preview.size_ = {640,480}; + wanted.preview.size_ = {640, 480}; wanted.preview.format_ = CAMERA_FORMAT_RGBA_8888; - wanted.video.size_ = {640,480}; + wanted.video.size_ = {640, 480}; wanted.video.format_ = CAMERA_FORMAT_RGBA_8888; - wanted.video.framerates_ = {30,30}; + wanted.video.framerates_ = {30, 30}; SelectProfiles profiles = SelectWantedProfiles(modeAbility, wanted); ASSERT_NE(profiles.preview.format_, -1); @@ -2986,11 +2986,11 @@ HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_profession_072, ASSERT_NE(modeAbility, nullptr); SelectProfiles wanted; - wanted.preview.size_ = {640,480}; + wanted.preview.size_ = {640, 480}; wanted.preview.format_ = CAMERA_FORMAT_RGBA_8888; - wanted.video.size_ = {640,480}; + wanted.video.size_ = {640, 480}; wanted.video.format_ = CAMERA_FORMAT_RGBA_8888; - wanted.video.framerates_ = {30,30}; + wanted.video.framerates_ = {30, 30}; SelectProfiles profiles = SelectWantedProfiles(modeAbility, wanted); ASSERT_NE(profiles.preview.format_, -1); @@ -3152,11 +3152,11 @@ HWTEST_F(CameraFrameworkModuleTest, camera_framework_moduletest_profession_074, ASSERT_NE(modeAbility, nullptr); SelectProfiles wanted; - wanted.preview.size_ = {640,480}; + wanted.preview.size_ = {640, 480}; wanted.preview.format_ = CAMERA_FORMAT_RGBA_8888; - wanted.video.size_ = {640,480}; + wanted.video.size_ = {640, 480}; wanted.video.format_ = CAMERA_FORMAT_RGBA_8888; - wanted.video.framerates_ = {30,30}; + wanted.video.framerates_ = {30, 30}; SelectProfiles profiles = SelectWantedProfiles(modeAbility, wanted); ASSERT_NE(profiles.preview.format_, -1); diff --git a/interfaces/inner_api/native/camera/include/session/profession_session.h b/interfaces/inner_api/native/camera/include/session/profession_session.h index a0ae86061..732060c90 100644 --- a/interfaces/inner_api/native/camera/include/session/profession_session.h +++ b/interfaces/inner_api/native/camera/include/session/profession_session.h @@ -498,6 +498,7 @@ 
protected: static const std::unordered_map metaExposureHintModeMap_; static const std::unordered_map fwkExposureHintModeMap_; private: + std::vector ParsePhysicalApertureRangeFromMeta(const camera_metadata_item_t &item); std::mutex sessionCallbackMutex_; std::shared_ptr exposureInfoCallback_ = nullptr; std::shared_ptr isoInfoCallback_ = nullptr; diff --git a/interfaces/inner_api/native/camera/include/utils/metadata_common_utils.h b/interfaces/inner_api/native/camera/include/utils/metadata_common_utils.h index 11da922eb..655f2b621 100644 --- a/interfaces/inner_api/native/camera/include/utils/metadata_common_utils.h +++ b/interfaces/inner_api/native/camera/include/utils/metadata_common_utils.h @@ -36,6 +36,23 @@ public: static std::shared_ptr CopyMetadata( const std::shared_ptr srcMetadata); }; + +template +bool AddOrUpdateMetadata(std::shared_ptr metadata, uint32_t tag, T value) +{ + uint32_t count = 1; + bool status = false; + camera_metadata_item_t item; + int32_t ret = OHOS::Camera::FindCameraMetadataItem(metadata->get(), tag, &item); + if (ret == CAM_META_ITEM_NOT_FOUND) { + status = metadata->addEntry(tag, &value, count); + } else if (ret == CAM_META_SUCCESS) { + status = metadata->updateEntry(tag, &value, count); + } + return status; +} + +std::vector ParsePhysicalApertureRangeByMode(const camera_metadata_item_t &item, const int32_t modeName); } // namespace CameraStandard } // namespace OHOS #endif // OHOS_CAMERA_METADATA_COMMON_UTILS_H \ No newline at end of file diff --git a/services/camera_service/src/hcamera_service.cpp b/services/camera_service/src/hcamera_service.cpp index 877a98d9a..78c0dc8a1 100644 --- a/services/camera_service/src/hcamera_service.cpp +++ b/services/camera_service/src/hcamera_service.cpp @@ -126,7 +126,7 @@ int32_t HCameraService::GetCameras( } camera_metadata_item_t item; common_metadata_header_t* metadata = cameraAbility->get(); - int ret = OHOS::Camera::FindCameraMetadataItem(metadata, OHOS_ABILITY_CAMERA_POSITION, &item); + ret = 
OHOS::Camera::FindCameraMetadataItem(metadata, OHOS_ABILITY_CAMERA_POSITION, &item); uint8_t cameraPosition = (ret == CAM_META_SUCCESS) ? item.data.u8[0] : OHOS_CAMERA_POSITION_OTHER; ret = OHOS::Camera::FindCameraMetadataItem(metadata, OHOS_ABILITY_CAMERA_FOLDSCREEN_TYPE, &item); uint8_t foldType = (ret == CAM_META_SUCCESS) ? item.data.u8[0] : OHOS_CAMERA_FOLDSCREEN_OTHER; @@ -137,11 +137,9 @@ int32_t HCameraService::GetCameras( cameraPosition = POSITION_FOLD_INNER; } ret = OHOS::Camera::FindCameraMetadataItem(metadata, OHOS_ABILITY_CAMERA_TYPE, &item); - camera_type_enum_t cameraType = (ret == CAM_META_SUCCESS) ? - static_cast(item.data.u8[0]) : OHOS_CAMERA_TYPE_UNSPECIFIED; + uint8_t cameraType = (ret == CAM_META_SUCCESS) ? item.data.u8[0] : OHOS_CAMERA_TYPE_UNSPECIFIED; ret = OHOS::Camera::FindCameraMetadataItem(metadata, OHOS_ABILITY_CAMERA_CONNECTION_TYPE, &item); - camera_connection_type_t connectionType = (ret == CAM_META_SUCCESS) ? - static_cast(item.data.u8[0]) : OHOS_CAMERA_CONNECTION_TYPE_BUILTIN; + uint8_t connectionType = (ret == CAM_META_SUCCESS) ? item.data.u8[0] : OHOS_CAMERA_CONNECTION_TYPE_BUILTIN; ret = OHOS::Camera::FindCameraMetadataItem(metadata, OHOS_CONTROL_CAPTURE_MIRROR_SUPPORTED, &item); bool isMirrorSupported = (ret == CAM_META_SUCCESS) ? 
((item.data.u8[0] == 1) || (item.data.u8[0] == 0)) : false; @@ -150,8 +148,8 @@ int32_t HCameraService::GetCameras( for (uint32_t i = 0; i < item.count; i++) { supportModes.push_back(item.data.u8[i]); } - CAMERA_SYSEVENT_STATISTIC(CreateMsg("CameraManager GetCameras camera ID:%s, Camera position:%d," - " Camera Type:%d, Connection Type:%d, Mirror support:%d", + CAMERA_SYSEVENT_STATISTIC(CreateMsg("CameraManager GetCameras camera ID:%s, Camera position:%d, " + "Camera Type:%d, Connection Type:%d, Mirror support:%d", id.c_str(), cameraPosition, cameraType, connectionType, isMirrorSupported)); cameraInfos.emplace_back(make_shared(id, cameraType, cameraPosition, connectionType, supportModes, cameraAbility)); -- Gitee From e7eb63a64f4f800b6fcb71859ba90b18e128c7f1 Mon Sep 17 00:00:00 2001 From: lvxiaoqiang Date: Thu, 25 Apr 2024 19:02:02 +0800 Subject: [PATCH 7/8] fix huge function Signed-off-by: lvxiaoqiang Change-Id: Ie4a6a9cd10fa85ecd2871fab21a42d7002ac5886 --- .../src/mode/mode_manager_napi.cpp | 8 +- .../src/mode/profession_session_napi.cpp | 120 +++++++++--------- frameworks/native/camera/BUILD.gn | 1 - .../camera/src/input/camera_manager.cpp | 2 +- .../camera/src/session/profession_session.cpp | 6 - .../src/utils/metadata_common_utils.cpp | 4 +- .../include/mode/profession_session_napi.h | 7 + .../camera_service/src/hcamera_service.cpp | 34 ++--- 8 files changed, 93 insertions(+), 89 deletions(-) diff --git a/frameworks/js/camera_napi/src/mode/mode_manager_napi.cpp b/frameworks/js/camera_napi/src/mode/mode_manager_napi.cpp index fdada8d64..99525c399 100644 --- a/frameworks/js/camera_napi/src/mode/mode_manager_napi.cpp +++ b/frameworks/js/camera_napi/src/mode/mode_manager_napi.cpp @@ -152,16 +152,16 @@ napi_value ModeManagerNapi::CreateCameraSessionInstance(napi_env env, napi_callb napi_get_value_int32(env, argv[PARAM0], &jsModeName); MEDIA_INFO_LOG("ModeManagerNapi::CreateCameraSessionInstance mode = %{public}d", jsModeName); switch (jsModeName) { - case 
JS_CAPTURE: + case JsSceneMode::JS_CAPTURE: result = PhotoSessionNapi::CreateCameraSession(env); break; - case JS_VIDEO: + case JsSceneMode::JS_VIDEO: result = VideoSessionNapi::CreateCameraSession(env); break; - case JS_PORTRAIT: + case JsSceneMode::JS_PORTRAIT: result = PortraitSessionNapi::CreateCameraSession(env); break; - case JS_NIGHT: + case JsSceneMode::JS_NIGHT: result = NightSessionNapi::CreateCameraSession(env); break; default: diff --git a/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp b/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp index 3a9f14323..3f65841a7 100644 --- a/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp +++ b/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp @@ -51,76 +51,76 @@ void ProfessionSessionNapi::ProfessionSessionNapiDestructor(napi_env env, void* delete cameraObj; } } + +const std::vector ProfessionSessionNapi::manual_exposure_funcs = { + DECLARE_NAPI_FUNCTION("getSupportedMeteringModes", ProfessionSessionNapi::GetSupportedMeteringModes), + DECLARE_NAPI_FUNCTION("isExposureMeteringModeSupported", ProfessionSessionNapi::IsMeteringModeSupported), + DECLARE_NAPI_FUNCTION("getExposureMeteringMode", ProfessionSessionNapi::GetMeteringMode), + DECLARE_NAPI_FUNCTION("setExposureMeteringMode", ProfessionSessionNapi::SetMeteringMode), + + DECLARE_NAPI_FUNCTION("getExposureDurationRange", ProfessionSessionNapi::GetExposureDurationRange), + DECLARE_NAPI_FUNCTION("getExposureDuration", ProfessionSessionNapi::GetExposureDuration), + DECLARE_NAPI_FUNCTION("setExposureDuration", ProfessionSessionNapi::SetExposureDuration), +}; + +const std::vector ProfessionSessionNapi::manual_focus_funcs = { + DECLARE_NAPI_FUNCTION("getSupportedFocusAssistFlashModes", ProfessionSessionNapi::GetSupportedFocusAssistFlashModes), + DECLARE_NAPI_FUNCTION("isFocusAssistSupported", ProfessionSessionNapi::IsFocusAssistFlashModeSupported), + DECLARE_NAPI_FUNCTION("getFocusAssistFlashMode", 
ProfessionSessionNapi::GetFocusAssistFlashMode), + DECLARE_NAPI_FUNCTION("setFocusAssist", ProfessionSessionNapi::SetFocusAssistFlashMode), +}; + +const std::vector ProfessionSessionNapi::manual_iso_props = { + DECLARE_NAPI_FUNCTION("getISORange", ProfessionSessionNapi::GetIsoRange), + DECLARE_NAPI_FUNCTION("isManualISOSupported", ProfessionSessionNapi::IsManualIsoSupported), + DECLARE_NAPI_FUNCTION("getISO", ProfessionSessionNapi::GetISO), + DECLARE_NAPI_FUNCTION("setISO", ProfessionSessionNapi::SetISO), +}; + +const std::vector ProfessionSessionNapi::auto_wb_props = { + DECLARE_NAPI_FUNCTION("getSupportedWhiteBalanceModes", ProfessionSessionNapi::GetSupportedWhiteBalanceModes), + DECLARE_NAPI_FUNCTION("isWhiteBalanceModeSupported", ProfessionSessionNapi::IsWhiteBalanceModeSupported), + DECLARE_NAPI_FUNCTION("getWhiteBalanceMode", ProfessionSessionNapi::GetWhiteBalanceMode), + DECLARE_NAPI_FUNCTION("setWhiteBalanceMode", ProfessionSessionNapi::SetWhiteBalanceMode), +}; + +const std::vector ProfessionSessionNapi::manual_wb_props = { + DECLARE_NAPI_FUNCTION("getWhiteBalanceRange", ProfessionSessionNapi::GetManualWhiteBalanceRange), + DECLARE_NAPI_FUNCTION("isManualWhiteBalanceSupported", ProfessionSessionNapi::IsManualWhiteBalanceSupported), + DECLARE_NAPI_FUNCTION("getWhiteBalance", ProfessionSessionNapi::GetManualWhiteBalance), + DECLARE_NAPI_FUNCTION("setWhiteBalance", ProfessionSessionNapi::SetManualWhiteBalance), +}; + +const std::vector ProfessionSessionNapi::pro_session_props = { + DECLARE_NAPI_FUNCTION("getSupportedExposureHintModes", ProfessionSessionNapi::GetSupportedExposureHintModes), + DECLARE_NAPI_FUNCTION("getExposureHintMode", ProfessionSessionNapi::GetExposureHintMode), + DECLARE_NAPI_FUNCTION("setExposureHintMode", ProfessionSessionNapi::SetExposureHintMode), + + DECLARE_NAPI_FUNCTION("getSupportedPhysicalApertures", ProfessionSessionNapi::GetSupportedPhysicalApertures), + DECLARE_NAPI_FUNCTION("getPhysicalAperture", 
ProfessionSessionNapi::GetPhysicalAperture), + DECLARE_NAPI_FUNCTION("setPhysicalAperture", ProfessionSessionNapi::SetPhysicalAperture), + DECLARE_NAPI_FUNCTION("on", ProfessionSessionNapi::On), + DECLARE_NAPI_FUNCTION("once", ProfessionSessionNapi::Once), + DECLARE_NAPI_FUNCTION("off", ProfessionSessionNapi::Off), +}; + napi_value ProfessionSessionNapi::Init(napi_env env, napi_value exports) { MEDIA_DEBUG_LOG("Init is called"); napi_status status; napi_value ctorObj; std::vector manual_exposure_props = CameraSessionNapi::auto_exposure_props; - std::vector manual_exposure_funcs = { - DECLARE_NAPI_FUNCTION("getSupportedMeteringModes", GetSupportedMeteringModes), - DECLARE_NAPI_FUNCTION("isExposureMeteringModeSupported", IsMeteringModeSupported), - DECLARE_NAPI_FUNCTION("getExposureMeteringMode", GetMeteringMode), - DECLARE_NAPI_FUNCTION("setExposureMeteringMode", SetMeteringMode), - - DECLARE_NAPI_FUNCTION("getExposureDurationRange", GetExposureDurationRange), - DECLARE_NAPI_FUNCTION("getExposureDuration", GetExposureDuration), - DECLARE_NAPI_FUNCTION("setExposureDuration", SetExposureDuration), - }; - manual_exposure_props.insert(manual_exposure_props.end(), - manual_exposure_funcs.begin(), manual_exposure_funcs.end()); - + manual_exposure_props.insert(manual_exposure_props.end(), ProfessionSessionNapi::manual_exposure_funcs.begin(), + ProfessionSessionNapi::manual_exposure_funcs.end()); std::vector pro_manual_focus_props = CameraSessionNapi::manual_focus_props; - std::vector manual_focus_funcs = { - DECLARE_NAPI_FUNCTION("getSupportedFocusAssistFlashModes", GetSupportedFocusAssistFlashModes), - DECLARE_NAPI_FUNCTION("isFocusAssistSupported", IsFocusAssistFlashModeSupported), - DECLARE_NAPI_FUNCTION("getFocusAssistFlashMode", GetFocusAssistFlashMode), - DECLARE_NAPI_FUNCTION("setFocusAssist", SetFocusAssistFlashMode), - }; - - pro_manual_focus_props.insert(pro_manual_focus_props.end(), - manual_focus_funcs.begin(), manual_focus_funcs.end()); - - std::vector 
manual_iso_props = { - DECLARE_NAPI_FUNCTION("getISORange", GetIsoRange), - DECLARE_NAPI_FUNCTION("isManualISOSupported", IsManualIsoSupported), - DECLARE_NAPI_FUNCTION("getISO", GetISO), - DECLARE_NAPI_FUNCTION("setISO", SetISO), - }; - - std::vector auto_wb_props = { - DECLARE_NAPI_FUNCTION("getSupportedWhiteBalanceModes", GetSupportedWhiteBalanceModes), - DECLARE_NAPI_FUNCTION("isWhiteBalanceModeSupported", IsWhiteBalanceModeSupported), - DECLARE_NAPI_FUNCTION("getWhiteBalanceMode", GetWhiteBalanceMode), - DECLARE_NAPI_FUNCTION("setWhiteBalanceMode", SetWhiteBalanceMode), - }; - - std::vector manual_wb_props = { - DECLARE_NAPI_FUNCTION("getWhiteBalanceRange", GetManualWhiteBalanceRange), - DECLARE_NAPI_FUNCTION("isManualWhiteBalanceSupported", IsManualWhiteBalanceSupported), - DECLARE_NAPI_FUNCTION("getWhiteBalance", GetManualWhiteBalance), - DECLARE_NAPI_FUNCTION("setWhiteBalance", SetManualWhiteBalance), - }; - - std::vector pro_session_props = { - DECLARE_NAPI_FUNCTION("getSupportedExposureHintModes", GetSupportedExposureHintModes), - DECLARE_NAPI_FUNCTION("getExposureHintMode", GetExposureHintMode), - DECLARE_NAPI_FUNCTION("setExposureHintMode", SetExposureHintMode), - - DECLARE_NAPI_FUNCTION("getSupportedPhysicalApertures", GetSupportedPhysicalApertures), - DECLARE_NAPI_FUNCTION("getPhysicalAperture", GetPhysicalAperture), - DECLARE_NAPI_FUNCTION("setPhysicalAperture", SetPhysicalAperture), - DECLARE_NAPI_FUNCTION("on", On), - DECLARE_NAPI_FUNCTION("once", Once), - DECLARE_NAPI_FUNCTION("off", Off), - }; - + pro_manual_focus_props.insert(pro_manual_focus_props.end(), ProfessionSessionNapi::manual_focus_funcs.begin(), + ProfessionSessionNapi::manual_focus_funcs.end()); std::vector> descriptors = { CameraSessionNapi::camera_process_props, CameraSessionNapi::zoom_props, CameraSessionNapi::color_effect_props, CameraSessionNapi::flash_props, - CameraSessionNapi::focus_props, manual_iso_props, auto_wb_props, manual_wb_props, - manual_exposure_props, 
pro_manual_focus_props, pro_session_props}; - + CameraSessionNapi::focus_props, ProfessionSessionNapi::manual_iso_props, + ProfessionSessionNapi::auto_wb_props, ProfessionSessionNapi::manual_wb_props, + ProfessionSessionNapi::pro_session_props, manual_exposure_props, pro_manual_focus_props}; std::vector professional_session_props = CameraNapiUtils::GetPropertyDescriptor(descriptors); status = napi_define_class(env, PROFESSIONAL_SESSION_NAPI_CLASS_NAME, NAPI_AUTO_LENGTH, diff --git a/frameworks/native/camera/BUILD.gn b/frameworks/native/camera/BUILD.gn index e3b90007e..f06f0145a 100644 --- a/frameworks/native/camera/BUILD.gn +++ b/frameworks/native/camera/BUILD.gn @@ -113,7 +113,6 @@ ohos_shared_library("camera_framework") { cfi = true cfi_cross_dso = true debug = false - blocklist = "../../../cfi_blocklist.txt" } public_configs = [ diff --git a/frameworks/native/camera/src/input/camera_manager.cpp b/frameworks/native/camera/src/input/camera_manager.cpp index a68e81f50..f23154cce 100644 --- a/frameworks/native/camera/src/input/camera_manager.cpp +++ b/frameworks/native/camera/src/input/camera_manager.cpp @@ -828,7 +828,7 @@ sptr CameraManager::GetCameraDeviceFromId(std::string cameraId) return cameraObj; } -sptr &CameraManager::GetInstance() __attribute__((no_sanitize("cfi"))) +sptr &CameraManager::GetInstance() { if (CameraManager::cameraManager_ == nullptr) { std::unique_lock lock(instanceMutex_); diff --git a/frameworks/native/camera/src/session/profession_session.cpp b/frameworks/native/camera/src/session/profession_session.cpp index 885ea8147..cb3bbb3bd 100644 --- a/frameworks/native/camera/src/session/profession_session.cpp +++ b/frameworks/native/camera/src/session/profession_session.cpp @@ -1223,12 +1223,6 @@ int32_t ProfessionSession::GetSupportedPhysicalApertures(std::vector allRange = {}; - for (uint32_t i = 0; i < item.count; i++) { - allRange.push_back(ConfusingNumber(item.data.f[i])); - } - 
MEDIA_DEBUG_LOG("ProfessionSession::GetSupportedPhysicalApertures mode %{public}d, allRange=%{public}s", - GetMode(), Container2String(allRange.begin(), allRange.end()).c_str()); std::vector chooseModeRange = ParsePhysicalApertureRangeByMode(item, GetMode()); int32_t deviceCntPos = 1; int32_t supportedDeviceCount = static_cast(chooseModeRange[deviceCntPos]); diff --git a/frameworks/native/camera/src/utils/metadata_common_utils.cpp b/frameworks/native/camera/src/utils/metadata_common_utils.cpp index b97899bd3..23073671b 100644 --- a/frameworks/native/camera/src/utils/metadata_common_utils.cpp +++ b/frameworks/native/camera/src/utils/metadata_common_utils.cpp @@ -182,7 +182,7 @@ std::vector ParsePhysicalApertureRangeByMode(const camera_metadata_item_t if (item.data.f[i] == npos && item.data.f[i + 1] == npos) { modeRange.emplace_back(npos); MEDIA_DEBUG_LOG("ParsePhysicalApertureRangeByMode mode %{public}d, modeRange=%{public}s", - modeName, Container2String(modeRange.begin(), modeRange.end()).c_str()); + modeName, Container2String(modeRange.begin(), modeRange.end()).c_str()); modeRanges.emplace_back(std::move(modeRange)); modeRange.clear(); i++; @@ -196,7 +196,7 @@ std::vector ParsePhysicalApertureRangeByMode(const camera_metadata_item_t return currentMode == value[0]; }); if (it == modeRanges.end()) { - MEDIA_ERR_LOG("ParsePhysicalApertureRangeByMode Failed meta not support mode:%{public}d", modeName); + MEDIA_ERR_LOG("ParsePhysicalApertureRangeByMode Failed meta not support mode:%{public}d", modeName); return {}; } diff --git a/interfaces/kits/js/camera_napi/include/mode/profession_session_napi.h b/interfaces/kits/js/camera_napi/include/mode/profession_session_napi.h index e33f037ae..d93b77776 100644 --- a/interfaces/kits/js/camera_napi/include/mode/profession_session_napi.h +++ b/interfaces/kits/js/camera_napi/include/mode/profession_session_napi.h @@ -150,6 +150,13 @@ public: static napi_value Once(napi_env env, napi_callback_info info); static napi_value 
Off(napi_env env, napi_callback_info info); + static const std::vector manual_exposure_funcs; + static const std::vector manual_focus_funcs; + static const std::vector manual_iso_props; + static const std::vector auto_wb_props; + static const std::vector manual_wb_props; + static const std::vector pro_session_props; + std::shared_ptr exposureInfoCallback_ = nullptr; std::shared_ptr isoInfoCallback_ = nullptr; std::shared_ptr apertureInfoCallback_ = nullptr; diff --git a/services/camera_service/src/hcamera_service.cpp b/services/camera_service/src/hcamera_service.cpp index 78c0dc8a1..37e0fc112 100644 --- a/services/camera_service/src/hcamera_service.cpp +++ b/services/camera_service/src/hcamera_service.cpp @@ -182,27 +182,31 @@ vector> HCameraService::ChoosePhysicalCameras( OHOS::HDI::Camera::V1_3::OperationMode::PROFESSIONAL_PHOTO, OHOS::HDI::Camera::V1_3::OperationMode::PROFESSIONAL_VIDEO, }; - vector> physicalCameras; + vector> physicalCameraInfos = {}; for (auto& camera : cameraInfos) { if (std::any_of(choosedCameras.begin(), choosedCameras.end(), [camera](const auto& defaultCamera) { return camera->cameraId == defaultCamera->cameraId; }) ) { - MEDIA_INFO_LOG("ChoosePhysicalCameras alreadly has default camera"); + MEDIA_INFO_LOG("ChoosePhysicalCameras alreadly has default camera: %{public}s", camera->cameraId.c_str()); } else { - MEDIA_INFO_LOG("ChoosePhysicalCameras camera ID:%s, CameraType: %{public}d, Camera position:%{public}d, " - "Connection Type:%{public}d", - camera->cameraId.c_str(), camera->cameraType, camera->position, camera->connectionType); - - bool isSupportPhysicalCamera = std::any_of(camera->supportModes.begin(), camera->supportModes.end(), - [&supportedPhysicalCamerasModes](auto mode) -> bool { - return any_of(supportedPhysicalCamerasModes.begin(), supportedPhysicalCamerasModes.end(), - [mode](auto it)-> bool { return it == mode; }); - }); - if (camera->cameraType != camera_type_enum_t::OHOS_CAMERA_TYPE_UNSPECIFIED && isSupportPhysicalCamera) { 
- physicalCameras.emplace_back(camera); - MEDIA_INFO_LOG("ChoosePhysicalCameras add camera ID:%{public}s", camera->cameraId.c_str()); - } + physicalCameraInfos.push_back(camera); + } + } + vector> physicalCameras = {}; + for (auto& camera : physicalCameraInfos) { + MEDIA_INFO_LOG("ChoosePhysicalCameras camera ID:%s, CameraType: %{public}d, Camera position:%{public}d, " + "Connection Type:%{public}d", + camera->cameraId.c_str(), camera->cameraType, camera->position, camera->connectionType); + + bool isSupportPhysicalCamera = std::any_of(camera->supportModes.begin(), camera->supportModes.end(), + [&supportedPhysicalCamerasModes](auto mode) -> bool { + return any_of(supportedPhysicalCamerasModes.begin(), supportedPhysicalCamerasModes.end(), + [mode](auto it)-> bool { return it == mode; }); + }); + if (camera->cameraType != camera_type_enum_t::OHOS_CAMERA_TYPE_UNSPECIFIED && isSupportPhysicalCamera) { + physicalCameras.emplace_back(camera); + MEDIA_INFO_LOG("ChoosePhysicalCameras add camera ID:%{public}s", camera->cameraId.c_str()); } } return physicalCameras; -- Gitee From 9f3b61b1477bec199c748fa5568d2fd69c9433a7 Mon Sep 17 00:00:00 2001 From: lvxiaoqiang Date: Thu, 25 Apr 2024 19:53:47 +0800 Subject: [PATCH 8/8] fix code check Signed-off-by: lvxiaoqiang Change-Id: I3f6c4a6c69b17a03b6eddc9edf7597d0d50a6513 --- .../js/camera_napi/src/mode/profession_session_napi.cpp | 5 +++-- services/camera_service/src/hcamera_service.cpp | 5 ++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp b/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp index 3f65841a7..9a1230831 100644 --- a/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp +++ b/frameworks/js/camera_napi/src/mode/profession_session_napi.cpp @@ -64,7 +64,8 @@ const std::vector ProfessionSessionNapi::manual_exposu }; const std::vector ProfessionSessionNapi::manual_focus_funcs = { - 
DECLARE_NAPI_FUNCTION("getSupportedFocusAssistFlashModes", ProfessionSessionNapi::GetSupportedFocusAssistFlashModes), + DECLARE_NAPI_FUNCTION("getSupportedFocusAssistFlashModes", + ProfessionSessionNapi::GetSupportedFocusAssistFlashModes), DECLARE_NAPI_FUNCTION("isFocusAssistSupported", ProfessionSessionNapi::IsFocusAssistFlashModeSupported), DECLARE_NAPI_FUNCTION("getFocusAssistFlashMode", ProfessionSessionNapi::GetFocusAssistFlashMode), DECLARE_NAPI_FUNCTION("setFocusAssist", ProfessionSessionNapi::SetFocusAssistFlashMode), @@ -113,7 +114,7 @@ napi_value ProfessionSessionNapi::Init(napi_env env, napi_value exports) manual_exposure_props.insert(manual_exposure_props.end(), ProfessionSessionNapi::manual_exposure_funcs.begin(), ProfessionSessionNapi::manual_exposure_funcs.end()); std::vector pro_manual_focus_props = CameraSessionNapi::manual_focus_props; - pro_manual_focus_props.insert(pro_manual_focus_props.end(), ProfessionSessionNapi::manual_focus_funcs.begin(), + pro_manual_focus_props.insert(pro_manual_focus_props.end(), ProfessionSessionNapi::manual_focus_funcs.begin(), ProfessionSessionNapi::manual_focus_funcs.end()); std::vector> descriptors = { CameraSessionNapi::camera_process_props, CameraSessionNapi::zoom_props, diff --git a/services/camera_service/src/hcamera_service.cpp b/services/camera_service/src/hcamera_service.cpp index 37e0fc112..0113d80ba 100644 --- a/services/camera_service/src/hcamera_service.cpp +++ b/services/camera_service/src/hcamera_service.cpp @@ -196,9 +196,8 @@ vector> HCameraService::ChoosePhysicalCameras( vector> physicalCameras = {}; for (auto& camera : physicalCameraInfos) { MEDIA_INFO_LOG("ChoosePhysicalCameras camera ID:%s, CameraType: %{public}d, Camera position:%{public}d, " - "Connection Type:%{public}d", - camera->cameraId.c_str(), camera->cameraType, camera->position, camera->connectionType); - + "Connection Type:%{public}d", + camera->cameraId.c_str(), camera->cameraType, camera->position, camera->connectionType); bool 
isSupportPhysicalCamera = std::any_of(camera->supportModes.begin(), camera->supportModes.end(), [&supportedPhysicalCamerasModes](auto mode) -> bool { return any_of(supportedPhysicalCamerasModes.begin(), supportedPhysicalCamerasModes.end(), -- Gitee