diff --git a/camera/interfaces/include/display_format.h b/camera/interfaces/include/display_format.h
index e24890d9af8b6640f57db8f31446dd78e6f6fc04..82cd29db3aef9b6c92c1f8263b19ec96f9b32c8b 100644
--- a/camera/interfaces/include/display_format.h
+++ b/camera/interfaces/include/display_format.h
@@ -108,6 +108,7 @@ enum PixelFormat : int32_t {
     PIXEL_FMT_YCBCR_P010,
     PIXEL_FMT_YCRCB_P010,
     PIXEL_FMT_RAW10,
+    PIXEL_FMT_BLOB,
     PIXEL_FMT_VENDER_MASK = 0X7FFF0000,
     PIXEL_FMT_BUTT = 0X7FFFFFFF,
 };
diff --git a/camera/test/ut/usb_camera/test_camera_base.cpp b/camera/test/ut/usb_camera/test_camera_base.cpp
index d5829596cd4fc3869dc01009e5764f0199446742..50131ab9a02b5dd90c6fbd8bc53d7c3a8bd0b103 100644
--- a/camera/test/ut/usb_camera/test_camera_base.cpp
+++ b/camera/test/ut/usb_camera/test_camera_base.cpp
@@ -415,7 +415,7 @@ void TestCameraBase::Init()
 void TestCameraBase::UsbInit()
 {
     if (cameraHost == nullptr) {
-        constexpr const char *demoServiceName = "camera_service";
+        constexpr const char *demoServiceName = "camera_service_usb";
         cameraHost = ICameraHost::Get(demoServiceName, false);
         if (cameraHost == nullptr) {
             std::cout << "==========[test log] CreateCameraHost failed." << std::endl;
diff --git a/camera/test/ut/usb_camera/usb_camera_test.cpp b/camera/test/ut/usb_camera/usb_camera_test.cpp
index d74d824eb2cfca2b57653dbf4d0ae4d04f4eed0f..d84479b63433c6100e9745a6a37f42c1ac31ca56 100644
--- a/camera/test/ut/usb_camera/usb_camera_test.cpp
+++ b/camera/test/ut/usb_camera/usb_camera_test.cpp
@@ -1958,4 +1958,115 @@ TEST_F(UtestUSBCameraTest, camera_usb_0056)
     cameraBase_->captureIds = {cameraBase_->CAPTURE_ID_PREVIEW, cameraBase_->CAPTURE_ID_VIDEO};
     cameraBase_->streamIds = {cameraBase_->STREAM_ID_PREVIEW, cameraBase_->STREAM_ID_VIDEO};
     cameraBase_->StopStream(cameraBase_->captureIds, cameraBase_->streamIds);
-}
\ No newline at end of file
+}
+
+/**
+ * @tc.name: USB Camera
+ * @tc.desc: single preview stream, output jpeg, expected success.
+ * @tc.level: Level0
+ * @tc.size: MediumTest
+ * @tc.type: Function
+ */
+TEST_F(UtestUSBCameraTest, camera_usb_0057)
+{
+    cameraBase_->OpenUsbCamera();
+    cameraBase_->AchieveStreamOperator();
+    auto streamCustomerVideo = std::make_shared<StreamCustomer>();
+
+    uint32_t captureIdPreview = cameraBase_->CAPTURE_ID_PREVIEW;
+    uint32_t streamIdPreview = cameraBase_->STREAM_ID_PREVIEW;
+    std::vector<StreamInfo> streamInfos;
+    StreamInfo streamInfo = {};
+    streamInfo.streamId_ = streamIdPreview;
+    streamInfo.width_ = 1280; // 1280:picture width
+    streamInfo.height_ = 720; // 720:picture height
+    streamInfo.format_ = PIXEL_FMT_BLOB;
+    streamInfo.encodeType_ = ENCODE_TYPE_NULL;
+    streamInfo.dataspace_ = 8; // 8:picture dataspace
+    streamInfo.intent_ = PREVIEW;
+    streamInfo.tunneledMode_ = 5; // 5:tunnel mode
+    auto producer = streamCustomerVideo->CreateProducer();
+    streamInfo.bufferQueue_ = new (std::nothrow) BufferProducerSequenceable(producer);
+    ASSERT_NE(streamInfo.bufferQueue_, nullptr);
+    streamInfo.bufferQueue_->producer_->SetQueueSize(8); // 8:set bufferQueue size
+    streamInfos.push_back(streamInfo);
+
+    cameraBase_->rc = (CamRetCode)cameraBase_->streamOperator->CreateStreams(streamInfos);
+    EXPECT_EQ(true, cameraBase_->rc == HDI::Camera::V1_0::NO_ERROR);
+    cameraBase_->rc = (CamRetCode)cameraBase_->streamOperator->CommitStreams(NORMAL, cameraBase_->ability_);
+    EXPECT_EQ(true, cameraBase_->rc == HDI::Camera::V1_0::NO_ERROR);
+
+    EXPECT_EQ(true, cameraBase_->rc == HDI::Camera::V1_0::NO_ERROR);
+    streamCustomerVideo->ReceiveFrameOn([this](const unsigned char *addr, const uint32_t size) {
+        StoreFile(addr, size, "preview_mjpeg.jpeg");
+    });
+
+    CaptureInfo captureInfoPreview = {
+        .streamIds_ = {streamIdPreview},
+        .captureSetting_ = cameraBase_->ability_,
+        .enableShutterCallback_ = false,
+    };
+    std::cout << "start capture preview1" << std::endl;
+    CAMERA_LOGE("start capture preview1");
+    cameraBase_->rc = (CamRetCode)cameraBase_->streamOperator->Capture(captureIdPreview, captureInfoPreview, true);
+    EXPECT_EQ(true, cameraBase_->rc == HDI::Camera::V1_0::NO_ERROR);
+    sleep(1);
+
+    std::cout << "cancel capture preview1" << std::endl;
+    CAMERA_LOGE("cancel capture preview1");
+    cameraBase_->rc = (CamRetCode)cameraBase_->streamOperator->CancelCapture(captureIdPreview);
+    EXPECT_EQ(true, cameraBase_->rc == HDI::Camera::V1_0::NO_ERROR);
+    sleep(1);
+
+    std::cout << "start capture preview2" << std::endl;
+    CAMERA_LOGE("start capture preview2");
+    cameraBase_->rc = (CamRetCode)cameraBase_->streamOperator->Capture(captureIdPreview, captureInfoPreview, true);
+    EXPECT_EQ(true, cameraBase_->rc == HDI::Camera::V1_0::NO_ERROR);
+    sleep(1);
+
+    streamCustomerVideo->ReceiveFrameOff();
+
+    std::cout << "cancel capture preview2" << std::endl;
+    CAMERA_LOGE("cancel capture preview2");
+    cameraBase_->rc = (CamRetCode)cameraBase_->streamOperator->CancelCapture({captureIdPreview});
+    EXPECT_EQ(true, cameraBase_->rc == HDI::Camera::V1_0::NO_ERROR);
+    sleep(1);
+    cameraBase_->rc = (CamRetCode)cameraBase_->streamOperator->ReleaseStreams({streamIdPreview});
+    EXPECT_EQ(true, cameraBase_->rc == HDI::Camera::V1_0::NO_ERROR);
+}
+
+/**
+ * @tc.name: USB Camera
+ * @tc.desc: get value of OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS
+ * @tc.level: Level0
+ * @tc.size: MediumTest
+ * @tc.type: Function
+ */
+TEST_F(UtestUSBCameraTest, camera_usb_0058)
+{
+    bool hasFormatMjpeg = false;
+    cameraBase_->OpenUsbCamera();
+    ability_ = cameraBase_->GetCameraAbility();
+    EXPECT_NE(ability_, nullptr);
+    common_metadata_header_t *data = ability_->get();
+    EXPECT_NE(data, nullptr);
+    camera_metadata_item_t entry;
+    int ret = FindCameraMetadataItem(data, OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, &entry);
+    if (ret == 0 && entry.data.i32 != nullptr && entry.count > 0) {
+        std::cout << "print tag value start." << std::endl;
+        std::cout << "count " << (entry.count - 1) << std::endl;
+        for (size_t i = 1; i < entry.count; i++) {
+            std::cout << entry.data.i32[i] << " ";
+            if (entry.data.i32[i] != -1) {
+                continue;
+            }
+            std::cout << std::endl;
+            if (entry.data.i32[i - 1] != -1 && i + 1 < entry.count && entry.data.i32[i + 1] == OHOS_CAMERA_FORMAT_MJPEG) {
+                hasFormatMjpeg = true;
+            }
+        }
+        std::cout << "print tag value end." << std::endl;
+    }
+    std::cout << "OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS has hasFormatMjpeg :" << hasFormatMjpeg
+        << std::endl;
+}
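// Illustrative sketch, not part of this change set: camera_usb_0058 above relies on the flat layout
// of OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, where every resolution entry ends with -1
// and the value following that -1 is either the next entry's format or a stream id. The helper name
// below is hypothetical; it only restates the scan the test performs.
#include <cstdint>
#include <vector>

static bool ContainsFormat(const std::vector<int32_t> &cfg, int32_t wantedFormat)
{
    // Skip the leading element (cfg[0]) exactly as the test does, then inspect the value that
    // immediately follows each single -1 delimiter.
    for (size_t i = 1; i + 1 < cfg.size(); i++) {
        if (cfg[i] == -1 && cfg[i - 1] != -1 && cfg[i + 1] == wantedFormat) {
            return true;
        }
    }
    return false;
}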
diff --git a/camera/test/ut/v4l2/stream_customer.cpp b/camera/test/ut/v4l2/stream_customer.cpp
index c4d3c52afec2ce42621c3c3299fd0ef495eefef4..339c5ff14968ad42f1c0fd932e3289c9d5d3a80b 100644
--- a/camera/test/ut/v4l2/stream_customer.cpp
+++ b/camera/test/ut/v4l2/stream_customer.cpp
@@ -32,11 +32,17 @@ void StreamCustomer::CamFrame(const std::function
         consumer_->AcquireBuffer(buff, flushFence, timestamp, damage);
         if (buff != nullptr) {
             void* addr = buff->GetVirAddr();
-            int32_t size = buff->GetSize();
+            int32_t gotSize = 0;
+            buff->GetExtraData()->ExtraGet(OHOS::Camera::dataSize, gotSize);
+            CAMERA_LOGD("OHOS::Camera::dataSize: %{public}d", gotSize);
+            uint32_t size = gotSize > 0 ? gotSize : buff->GetSize();
             if (callback != nullptr) {
                 callback(static_cast<const unsigned char *>(addr), size);
             }
-            consumer_->ReleaseBuffer(buff, -1);
+            auto ret = consumer_->ReleaseBuffer(buff, flushFence);
+            if (ret != 0) {
+                CAMERA_LOGE("ReleaseBuffer fail: %{public}d", ret);
+            }
         }
         usleep(delayTime);
     } while (camFrameExit_ == 0);
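// Illustrative sketch, not part of this change set: the loop above only reads the "dataSize" extra
// data; the value has to be written on the producer side when the frame is flushed. Assuming the
// symmetric ExtraSet() call on BufferExtraData (the producer code is not shown in this diff, and the
// header path follows the graphics surface module), the write side would look roughly like this; the
// helper name is hypothetical.
#include "surface_buffer.h"

static void TagPayloadSize(const OHOS::sptr<OHOS::SurfaceBuffer> &sb, int32_t payloadSize)
{
    if (sb == nullptr || sb->GetExtraData() == nullptr) {
        return;
    }
    // OHOS::Camera::dataSize is the same key the consumer reads back with ExtraGet().
    sb->GetExtraData()->ExtraSet(OHOS::Camera::dataSize, payloadSize);
}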
diff --git a/camera/vdi_base/common/adapter/platform/v4l2/src/driver_adapter/include/v4l2_buffer.h b/camera/vdi_base/common/adapter/platform/v4l2/src/driver_adapter/include/v4l2_buffer.h
index f799b6c49f3b8d5f0000cbd5135a73cf4dbf82ed..b9a87552b1cba4ab8c7b740f9b83d943ae579da4 100644
--- a/camera/vdi_base/common/adapter/platform/v4l2/src/driver_adapter/include/v4l2_buffer.h
+++ b/camera/vdi_base/common/adapter/platform/v4l2/src/driver_adapter/include/v4l2_buffer.h
@@ -41,6 +41,7 @@ struct AdapterBuff {
     void* userBufPtr;
     int32_t heapfd;
     int32_t dmafd;
+    std::shared_ptr<IBuffer> cameraBuffer;
 };
 
 class HosV4L2Buffers : public std::enable_shared_from_this<HosV4L2Buffers> { // hide construct function
diff --git a/camera/vdi_base/common/adapter/platform/v4l2/src/driver_adapter/src/v4l2_buffer.cpp b/camera/vdi_base/common/adapter/platform/v4l2/src/driver_adapter/src/v4l2_buffer.cpp
index 99bbf440b4bbb5dbec5b3803236d184302fa2f53..103a5e73bac1b20f2dbaaed10c9e56c3b59a52d7 100644
--- a/camera/vdi_base/common/adapter/platform/v4l2/src/driver_adapter/src/v4l2_buffer.cpp
+++ b/camera/vdi_base/common/adapter/platform/v4l2/src/driver_adapter/src/v4l2_buffer.cpp
@@ -327,8 +327,15 @@ RetCode HosV4L2Buffers::V4L2DequeueBuffer(int fd)
                 CAMERA_LOGE("ERROR: BufferMap length error");
                 return RC_ERROR;
             }
+            CAMERA_LOGD("memcpy_s buffer to user buffer, curFormat = %{public}u, bytesused = %{public}u",
+                adapterBufferMap_[buf.index].cameraBuffer->GetCurFormat(), buf.bytesused);
+            uint32_t length = adapterBufferMap_[buf.index].length;
+            if (adapterBufferMap_[buf.index].cameraBuffer->GetCurFormat() == CAMERA_FORMAT_BLOB) {
+                length = buf.bytesused;
+                adapterBufferMap_[buf.index].cameraBuffer->SetEsFrameSize(length);
+            }
             (void)memcpy_s(adapterBufferMap_[buf.index].userBufPtr, adapterBufferMap_[buf.index].length,
-                adapterBufferMap_[buf.index].start, adapterBufferMap_[buf.index].length);
+                adapterBufferMap_[buf.index].start, length);
         }
     }
     std::lock_guard<std::mutex> l(bufferLock_);
@@ -405,6 +412,7 @@ RetCode HosV4L2Buffers::SetAdapterBuffer(int fd, struct v4l2_buffer &buf, const
     }
 
     adapterBufferMap_[index].userBufPtr = frameSpec->buffer_->GetVirAddress();
+    adapterBufferMap_[index].cameraBuffer = frameSpec->buffer_;
 
     switch (memoryType_) {
         case V4L2_MEMORY_MMAP:
diff --git a/camera/vdi_base/common/buffer_manager/src/buffer_adapter/standard/buffer_adapter.h b/camera/vdi_base/common/buffer_manager/src/buffer_adapter/standard/buffer_adapter.h
index 4a86b576ed20f02dc33692ab8cb67570bed4e798..9848ec49a2b6e217b9ec9aab906c9a93f2654255 100644
--- a/camera/vdi_base/common/buffer_manager/src/buffer_adapter/standard/buffer_adapter.h
+++ b/camera/vdi_base/common/buffer_manager/src/buffer_adapter/standard/buffer_adapter.h
@@ -83,6 +83,7 @@ public:
             CameraHalHisysevent::CreateMsg("CameraFormatToPixelFormat failed cameraFormat:%d", cameraFormat));
         CAMERA_LOGI("not find cameraFormat = %{public}u, use default format", cameraFormat);
     }
+    CAMERA_LOGD("CameraFormatToPixelFormat: %{public}u -> %{public}u", cameraFormat, format);
     return format;
 }
 // convert pixel format to camera format. inline implementation to avoid duplicate code
@@ -97,6 +98,7 @@ public:
             CameraHalHisysevent::CreateMsg("PixelFormatToCameraFormat failed format:%d", format));
         CAMERA_LOGI("not find format = %{public}u, use default format", static_cast<uint32_t>(format));
     }
+    CAMERA_LOGD("PixelFormatToCameraFormat: %{public}u -> %{public}u", format, cameraFormat);
     return cameraFormat;
 }
 
diff --git a/camera/vdi_base/common/dump/src/camera_dump.cpp b/camera/vdi_base/common/dump/src/camera_dump.cpp
index cc5be57b94d34eef1a2f52633458833ec6d79185..e0f79a67bb4acddcf304af7f37ca0cbe35b79cc6 100644
--- a/camera/vdi_base/common/dump/src/camera_dump.cpp
+++ b/camera/vdi_base/common/dump/src/camera_dump.cpp
@@ -180,7 +180,7 @@ bool CameraDumper::DumpBuffer(std::string name, std::string type, const std::sha
         ss << "]_" << GetCurrentLocalTimeStamp();
         ss >> fileName;
-        fileName += ".yuv";
+        fileName += (buffer->GetCurFormat() == CAMERA_FORMAT_BLOB ? ".jpeg" : ".yuv");
     }
 
     return SaveDataToFile(fileName.c_str(), srcAddr, size);
 }
".jpeg" : ".yuv"); } return SaveDataToFile(fileName.c_str(), srcAddr, size); } diff --git a/camera/vdi_base/common/include/camera.h b/camera/vdi_base/common/include/camera.h index da087cd35b409e7768a4992afb719e2a87a8a1eb..48693c768fed9e69477652c82b8223cf3f9a7c9d 100644 --- a/camera/vdi_base/common/include/camera.h +++ b/camera/vdi_base/common/include/camera.h @@ -140,6 +140,7 @@ enum CameraBufferFormat : uint32_t { CAMERA_FORMAT_UYVY_422_PKG, /* UYVY422 packed format */ CAMERA_FORMAT_YVYU_422_PKG, /* YVYU422 packed format */ CAMERA_FORMAT_VYUY_422_PKG, /* VYUY422 packed format */ + CAMERA_FORMAT_BLOB, /* MJPEG packed format */ }; enum CameraEncodeType : int32_t { diff --git a/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/device_manager/v4l2_device_manager.cpp b/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/device_manager/v4l2_device_manager.cpp index a0cd6342724fc44f4736d901975f3c3e534c1cd7..509083579ee25c0ee48fd47c7af0ad31b5c96289 100644 --- a/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/device_manager/v4l2_device_manager.cpp +++ b/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/device_manager/v4l2_device_manager.cpp @@ -677,58 +677,101 @@ std::string VectorInt32ToString(std::vector vec) PS: '虚化'为OHOS_ABILITY_SCENE_PORTRAIT_EFFECT_TYPES, '滤镜'为OHOS_ABILITY_SCENE_FILTER_TYPES, '美颜'为OHOS_ABILITY_SCENE_BEAUTY_TYPES。 */ -void ChangeAbilityVectorFormat(std::vector &abilityVec, uint32_t format) -{ - constexpr uint32_t SkipNum = 7; - constexpr int32_t endMarker = -1; - if (abilityVec.size() == 0) { - CAMERA_LOGI("ChangeAbilityVectorFormat error, abilityVec.size() == 0"); - return; - } - abilityVec[0] = static_cast(format); - for (uint32_t index = SkipNum; index < abilityVec.size();) { - if (abilityVec[index - 1] == endMarker) { - abilityVec[index] = static_cast(format); - index += SkipNum; - } else { - index += 1; - } - } -} // {0, 3, 9, -1, 2, 3, 9, -1, 1, 3, -1} for pc std::vector g_defaultUsbCameraStreamFormatConfig = { - PREVIEW_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_422_YUYV, -1, - CAPTURE_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_422_YUYV, -1, - VIDEO_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, -1, + PREVIEW_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_422_YUYV, OHOS_CAMERA_FORMAT_MJPEG, -1, + CAPTURE_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_422_YUYV, OHOS_CAMERA_FORMAT_MJPEG, -1, + VIDEO_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_MJPEG, -1, }; -void V4L2DeviceManager::ConvertAbilityStreamAvailableExtendConfigurationsToOhos( - std::shared_ptr metadata, const std::vector& deviceFormat) +struct FormatInfoInner { + int32_t width; + int32_t height; + int32_t minFps; + int32_t maxFps; + bool isMjpeg; + bool isYuv; +}; + +static std::vector& ConvertDeviceFormat(const std::vector& deviceFormat) { - std::string name = "YUYV 4:2:2"; - std::vector formatVector; + static std::vector formatList = {}; + formatList.clear(); + const std::string nameYuv = "YUYV 4:2:2"; + const std::string nameMjpeg = "Motion-JPEG"; int32_t fpsValue = 0; uint32_t index = 0; for (auto& it : deviceFormat) { + if (it.fmtdesc.fps.numerator == 0) { + continue; + } fpsValue = it.fmtdesc.fps.denominator / it.fmtdesc.fps.numerator; - CAMERA_LOGI("formatVector[%{public}u], format = %{public}d * %{public}d @%{public}s, fps = %{public}d", + CAMERA_LOGI("ConvertDeviceFormat [%{public}u], format = %{public}d * %{public}d @%{public}s, fps = %{public}d", index, it.fmtdesc.width, it.fmtdesc.height, 
diff --git a/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/device_manager/v4l2_device_manager.cpp b/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/device_manager/v4l2_device_manager.cpp
index a0cd6342724fc44f4736d901975f3c3e534c1cd7..509083579ee25c0ee48fd47c7af0ad31b5c96289 100644
--- a/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/device_manager/v4l2_device_manager.cpp
+++ b/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/device_manager/v4l2_device_manager.cpp
@@ -677,58 +677,101 @@ std::string VectorInt32ToString(std::vector<int32_t> vec)
 PS: 'bokeh (虚化)' corresponds to OHOS_ABILITY_SCENE_PORTRAIT_EFFECT_TYPES, 'filter (滤镜)' to OHOS_ABILITY_SCENE_FILTER_TYPES, and 'beauty (美颜)' to OHOS_ABILITY_SCENE_BEAUTY_TYPES.
 */
-void ChangeAbilityVectorFormat(std::vector<int32_t> &abilityVec, uint32_t format)
-{
-    constexpr uint32_t SkipNum = 7;
-    constexpr int32_t endMarker = -1;
-    if (abilityVec.size() == 0) {
-        CAMERA_LOGI("ChangeAbilityVectorFormat error, abilityVec.size() == 0");
-        return;
-    }
-    abilityVec[0] = static_cast<int32_t>(format);
-    for (uint32_t index = SkipNum; index < abilityVec.size();) {
-        if (abilityVec[index - 1] == endMarker) {
-            abilityVec[index] = static_cast<int32_t>(format);
-            index += SkipNum;
-        } else {
-            index += 1;
-        }
-    }
-}
 // {0, 3, 9, -1, 2, 3, 9, -1, 1, 3, -1} for pc
 std::vector<int32_t> g_defaultUsbCameraStreamFormatConfig = {
-    PREVIEW_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_422_YUYV, -1,
-    CAPTURE_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_422_YUYV, -1,
-    VIDEO_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, -1,
+    PREVIEW_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_422_YUYV, OHOS_CAMERA_FORMAT_MJPEG, -1,
+    CAPTURE_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_422_YUYV, OHOS_CAMERA_FORMAT_MJPEG, -1,
+    VIDEO_STREAM, OHOS_CAMERA_FORMAT_YCRCB_420_SP, OHOS_CAMERA_FORMAT_MJPEG, -1,
 };
 
-void V4L2DeviceManager::ConvertAbilityStreamAvailableExtendConfigurationsToOhos(
-    std::shared_ptr<CameraMetadata> metadata, const std::vector<DeviceFormat>& deviceFormat)
+struct FormatInfoInner {
+    int32_t width;
+    int32_t height;
+    int32_t minFps;
+    int32_t maxFps;
+    bool isMjpeg;
+    bool isYuv;
+};
+
+static std::vector<FormatInfoInner>& ConvertDeviceFormat(const std::vector<DeviceFormat>& deviceFormat)
 {
-    std::string name = "YUYV 4:2:2";
-    std::vector<int32_t> formatVector;
+    static std::vector<FormatInfoInner> formatList = {};
+    formatList.clear();
+    const std::string nameYuv = "YUYV 4:2:2";
+    const std::string nameMjpeg = "Motion-JPEG";
     int32_t fpsValue = 0;
     uint32_t index = 0;
     for (auto& it : deviceFormat) {
+        if (it.fmtdesc.fps.numerator == 0) {
+            continue;
+        }
         fpsValue = it.fmtdesc.fps.denominator / it.fmtdesc.fps.numerator;
-        CAMERA_LOGI("formatVector[%{public}u], format = %{public}d * %{public}d @%{public}s, fps = %{public}d",
+        CAMERA_LOGI("ConvertDeviceFormat [%{public}u], format = %{public}d * %{public}d @%{public}s, fps = %{public}d",
             index, it.fmtdesc.width, it.fmtdesc.height, it.fmtdesc.description.c_str(), fpsValue);
         index++;
-        if (it.fmtdesc.description != name || it.fmtdesc.fps.numerator == 0) {
+        std::string formatDesc = std::to_string(it.fmtdesc.width) + "x" + std::to_string(it.fmtdesc.height);
+        auto fmt = std::find_if(formatList.begin(), formatList.end(),
+            [&] (const struct FormatInfoInner& format) {
+                return format.width == it.fmtdesc.width && format.height == it.fmtdesc.height;
+            });
+        if (fmt == formatList.end()) {
+            FormatInfoInner format = {
+                .width = it.fmtdesc.width,
+                .height = it.fmtdesc.height,
+                .minFps = fpsValue,
+                .maxFps = fpsValue,
+                .isMjpeg = it.fmtdesc.description == nameMjpeg,
+                .isYuv = it.fmtdesc.description == nameYuv,
+            };
+            formatList.push_back(format);
+        } else {
+            fmt->minFps = (fmt->minFps > fpsValue) ? fpsValue : fmt->minFps;
+            fmt->maxFps = (fmt->maxFps < fpsValue) ? fpsValue : fmt->maxFps;
+            fmt->isYuv = (fmt->isYuv || it.fmtdesc.description == nameYuv);
+            fmt->isMjpeg = (fmt->isMjpeg || it.fmtdesc.description == nameMjpeg);
+        }
+    }
+    std::sort(formatList.begin(), formatList.end(),
+        [](const struct FormatInfoInner& format1, const struct FormatInfoInner& format2) {
+            return (format1.width != format2.width) ?
+                format1.width < format2.width : format1.height < format2.height;
+        });
+    return formatList;
+}
+
+static std::vector<int32_t> GetFormatVector(const std::vector<FormatInfoInner>& formatInfo,
+    uint32_t format, uint32_t streamId)
+{
+    std::vector<int32_t> formatVector;
+    constexpr uint32_t minWidthForVideoStream = 160;
+    uint32_t minWidth = (streamId == VIDEO_STREAM) ? minWidthForVideoStream : 0;
+    for (auto& it : formatInfo) {
+        if (it.width <= minWidth) {
             continue;
         }
-        if (fpsValue > MINIMUM_FPS) {
-            formatVector.push_back(FORMAT);
-            formatVector.push_back(it.fmtdesc.width);
-            formatVector.push_back(it.fmtdesc.height);
-            formatVector.push_back(fpsValue);
-            formatVector.push_back(fpsValue);
-            formatVector.push_back(fpsValue);
-            formatVector.push_back(END_SYMBOL);
+        if (!it.isMjpeg && format == OHOS_CAMERA_FORMAT_MJPEG) {
+            CAMERA_LOGI("this format not support mjpeg, %{public}d x %{public}d @fps[%{public}d, %{public}d]",
+                it.width, it.height, it.minFps, it.maxFps);
+            continue;
         }
+        formatVector.push_back(format);
+        formatVector.push_back(it.width);
+        formatVector.push_back(it.height);
+        formatVector.push_back(it.maxFps);
+        formatVector.push_back(it.minFps);
+        formatVector.push_back(it.maxFps);
+        formatVector.push_back(END_SYMBOL);
+        CAMERA_LOGI("add format, %{public}d x %{public}d @fps[%{public}d, %{public}d]",
+            it.width, it.height, it.minFps, it.maxFps);
     }
+    return formatVector;
+}
+void V4L2DeviceManager::ConvertAbilityStreamAvailableExtendConfigurationsToOhos(
+    std::shared_ptr<CameraMetadata> metadata, const std::vector<DeviceFormat>& deviceFormat)
+{
+    auto &formatList = ConvertDeviceFormat(deviceFormat);
     std::vector<int32_t> streamAvailableExtendConfigurationsVector;
     streamAvailableExtendConfigurationsVector.push_back(0);
     int32_t streamId = -1;
@@ -744,13 +787,13 @@ void V4L2DeviceManager::ConvertAbilityStreamAvailableExtendConfigurationsToOhos(
             streamAvailableExtendConfigurationsVector.push_back(streamId);
             continue;
         }
-        ChangeAbilityVectorFormat(formatVector, cur);
+        auto fmtVec = GetFormatVector(formatList, cur, streamId);
         streamAvailableExtendConfigurationsVector.insert(streamAvailableExtendConfigurationsVector.end(),
-            formatVector.begin(), formatVector.end());
+            fmtVec.begin(), fmtVec.end());
     }
     streamAvailableExtendConfigurationsVector.push_back(END_SYMBOL);
-    CAMERA_LOGD("config is:%{public}s", VectorInt32ToString(streamAvailableExtendConfigurationsVector).c_str());
+    CAMERA_LOGI("config is:%{public}s", VectorInt32ToString(streamAvailableExtendConfigurationsVector).c_str());
     AddOrUpdateOhosTag(metadata, OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS,
         streamAvailableExtendConfigurationsVector);
 }
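// Worked example, not part of this change set: suppose the V4L2 enumeration for one USB camera
// reports three frame intervals:
//   1280 x 720 "YUYV 4:2:2"  @ 30 fps
//   1280 x 720 "YUYV 4:2:2"  @ 10 fps
//   1280 x 720 "Motion-JPEG" @ 30 fps
// ConvertDeviceFormat() merges them into a single FormatInfoInner
//   { width = 1280, height = 720, minFps = 10, maxFps = 30, isMjpeg = true, isYuv = true },
// and GetFormatVector(formatList, OHOS_CAMERA_FORMAT_MJPEG, PREVIEW_STREAM) then emits one
// seven-value entry terminated by END_SYMBOL (-1):
//   { OHOS_CAMERA_FORMAT_MJPEG, 1280, 720, 30, 10, 30, -1 }
// which is exactly the shape that camera_usb_0057/0058 consume on the test side.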
CAMERA_LOGI("config is:%{public}s", VectorInt32ToString(streamAvailableExtendConfigurationsVector).c_str()); AddOrUpdateOhosTag(metadata, OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, streamAvailableExtendConfigurationsVector); } diff --git a/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/pipeline_core/nodes/uvc_node/uvc_node.cpp b/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/pipeline_core/nodes/uvc_node/uvc_node.cpp index 0c179c55c80c6f20247938ac7c10e88d60ee85f4..bcd852835dfca527e06e1449e3288c104973f8c4 100644 --- a/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/pipeline_core/nodes/uvc_node/uvc_node.cpp +++ b/camera/vdi_base/usb_camera/adapter/platform/v4l2/src/pipeline_core/nodes/uvc_node/uvc_node.cpp @@ -95,9 +95,12 @@ RetCode UvcNode::Start(const int32_t streamId) std::vector> outPorts = GetOutPorts(); for (const auto& it : outPorts) { DeviceFormat format; - format.fmtdesc.pixelformat = V4L2_PIX_FMT_YUYV; + cameraformat_ = it->format_.format_; + format.fmtdesc.pixelformat = (cameraformat_ == CAMERA_FORMAT_BLOB ? V4L2_PIX_FMT_MJPEG : V4L2_PIX_FMT_YUYV); format.fmtdesc.width = static_cast(wide_); format.fmtdesc.height = static_cast(high_); + CAMERA_LOGI("UvcNode::Start width: %{public}d, height: %{public}d, format: %{public}u, pixelformat: %{public}s", + wide_, high_, it->format_.format_, format.fmtdesc.pixelformat == V4L2_PIX_FMT_YUYV ? "yuv" : "mjpeg"); int bufCnt = static_cast(it->format_.bufferCount_); rc = sensorController_->Start(bufCnt, format); if (rc == RC_ERROR) { @@ -196,6 +199,13 @@ static void SetImageAllBlack(uint8_t *buf, size_t bufferSize, uint32_t format) buf[i] = 0; buf[i + 1] = 0x80; } + return; + } + if (format == CAMERA_FORMAT_BLOB) { + if (memset_s(buf, bufferSize, 0, bufferSize) != EOK) { + CAMERA_LOGE("SetImageAllBlack memset_s failed"); + } + return; } } @@ -208,21 +218,21 @@ void UvcNode::DeliverBuffer(std::shared_ptr& buffer) CAMERA_LOGI("UvcNode::DeliverBuffer Begin, streamId[%{public}d], index[%{public}d]", buffer->GetStreamId(), buffer->GetIndex()); - buffer->SetCurFormat(CAMERA_FORMAT_YUYV_422_PKG); buffer->SetCurWidth(wide_); buffer->SetCurHeight(high_); if (MetadataController::GetInstance().IsMute()) { - SetImageAllBlack((uint8_t *)buffer->GetVirAddress(), buffer->GetSize(), CAMERA_FORMAT_YUYV_422_PKG); + SetImageAllBlack((uint8_t *)buffer->GetVirAddress(), buffer->GetSize(), buffer->GetCurFormat()); } SourceNode::DeliverBuffer(buffer); return; } - RetCode UvcNode::ProvideBuffers(std::shared_ptr frameSpec) { CAMERA_LOGI("UvcNode::ProvideBuffers enter. %{public}s", sensorController_->GetName().c_str()); + frameSpec->buffer_->SetCurFormat( + cameraformat_ == CAMERA_FORMAT_BLOB ? 
diff --git a/camera/vdi_base/usb_camera/pipeline_core/src/node/codec_node.cpp b/camera/vdi_base/usb_camera/pipeline_core/src/node/codec_node.cpp
index 2f0128b88f4fa873b9420c2b43350c01aeb2582c..af0bf58db7c1e37e4e40629fbddb6c32928fc696 100644
--- a/camera/vdi_base/usb_camera/pipeline_core/src/node/codec_node.cpp
+++ b/camera/vdi_base/usb_camera/pipeline_core/src/node/codec_node.cpp
@@ -280,10 +280,14 @@ void CodecNode::DeliverBuffer(std::shared_ptr<IBuffer>& buffer)
         return NodeBase::DeliverBuffer(buffer);
     }
 
+    if (buffer->GetCurFormat() == CAMERA_FORMAT_BLOB) {
+        return NodeBase::DeliverBuffer(buffer);
+    }
+
     int32_t id = buffer->GetStreamId();
     CAMERA_LOGI("CodecNode::DeliverBuffer, streamId[%{public}d], index[%{public}d],\
-        format = %{public}d, encode = %{public}d",
-        id, buffer->GetIndex(), buffer->GetFormat(), buffer->GetEncodeType());
+        format = %{public}d, encode = %{public}d, stride = %{public}d",
+        id, buffer->GetIndex(), buffer->GetFormat(), buffer->GetEncodeType(), buffer->GetStride());
 
     if (buffer->GetEncodeType() == ENCODE_TYPE_JPEG) {
         Yuv422ToJpeg(buffer);
@@ -301,6 +305,10 @@ void CodecNode::DeliverBuffer(std::shared_ptr<IBuffer>& buffer)
         buffer->SetEsKeyFrame(0);
     }
 
+    if (buffer->GetStride() != buffer->GetWidth()) {
+        NodeUtils::BufferTransformForStride(buffer);
+    }
+
     CameraDumper& dumper = CameraDumper::GetInstance();
     dumper.DumpBuffer("CodecNode", ENABLE_CODEC_NODE_CONVERTED, buffer);
diff --git a/camera/vdi_base/usb_camera/pipeline_core/src/node/node_utils.cpp b/camera/vdi_base/usb_camera/pipeline_core/src/node/node_utils.cpp
index e86c574eb9f6a4e3265782a01eae201bffea3afb..b88d976c60cd2aa95328585bd7172164177c0b31 100644
--- a/camera/vdi_base/usb_camera/pipeline_core/src/node/node_utils.cpp
+++ b/camera/vdi_base/usb_camera/pipeline_core/src/node/node_utils.cpp
@@ -144,4 +144,87 @@ void NodeUtils::BufferScaleFormatTransform(std::shared_ptr<IBuffer>& buffer, voi
         buffer->SetIsValidDataInSurfaceBuffer(true);
     }
 }
+
+static void AddStrideToNV21(uint8_t* buffer, uint8_t* bufferMAX, int width, int height, int newStride)
+{
+    const int yPlaneSize = width * height;
+    const int uvPlaneSize = yPlaneSize / 2;
+    const int totalSize = yPlaneSize + uvPlaneSize;
+
+    // 1. Compute the plane sizes after the stride has been applied
+    int newYPlaneSize = height * newStride;
+    int newUvPlaneSize = (height / 2) * newStride;
+
+    // 2. Move the UV plane row by row, from the last row to the first
+    uint8_t* uvEnd = buffer + totalSize;
+    for (int y = (height / 2) - 1; y >= 0; --y) {
+        uint8_t* src = buffer + yPlaneSize + y * width;
+        uint8_t* dst = buffer + newYPlaneSize + y * newStride;
+        if (memmove_s(dst, bufferMAX - dst, src, width) != 0) {
+            CAMERA_LOGE("AddStrideToNV21 memmove_s Fail 1");
+            return;
+        }
+        if (newStride > width) {
+            if (memset_s(dst + width, bufferMAX - (dst + width), 0, newStride - width)) {
+                CAMERA_LOGE("AddStrideToNV21 memset_s Fail 1");
+                return;
+            }
+        }
+    }
+
+    // 3. Move the Y plane row by row, from the last row to the first
+    for (int y = height - 1; y >= 0; --y) {
+        uint8_t* src = buffer + y * width;
+        uint8_t* dst = buffer + y * newStride;
+        if (memmove_s(dst, bufferMAX - dst, src, width) != 0) {
+            CAMERA_LOGE("AddStrideToNV21 memmove_s Fail 2");
+            return;
+        }
+        if (newStride > width) {
+            if (memset_s(dst + width, bufferMAX - (dst + width), 0, newStride - width)) {
+                CAMERA_LOGE("AddStrideToNV21 memset_s Fail 2");
+                return;
+            }
+        }
+    }
+}
+
+void NodeUtils::BufferTransformForStride(std::shared_ptr<IBuffer>& buffer)
+{
+    if (buffer == nullptr) {
+        CAMERA_LOGI("BufferTransformForStride Error buffer == nullptr");
+        return;
+    }
+
+    if (buffer->GetCurWidth() != buffer->GetWidth()
+        || buffer->GetCurHeight() != buffer->GetHeight()
+        || buffer->GetCurFormat() != buffer->GetFormat()) {
+        CAMERA_LOGI("width, height or format is not all the same");
+        return;
+    }
+
+    if (buffer->GetWidth() == buffer->GetStride()) {
+        CAMERA_LOGI("buffer->GetWidth() == buffer->GetStride(), no need stride");
+        return;
+    }
+
+    if (buffer->GetIsValidDataInSurfaceBuffer()) {
+        CAMERA_LOGE("IsValidDataInSurfaceBuffer true");
+        if (memcpy_s(buffer->GetVirAddress(), buffer->GetSize(),
+            buffer->GetSuffaceBufferAddr(), buffer->GetSuffaceBufferSize()) != 0) {
+            CAMERA_LOGE("BufferTransformForStride Fail, memcpy_s error");
+            return;
+        }
+    }
+
+    uint8_t* bufferForStride = (uint8_t*)buffer->GetSuffaceBufferAddr();
+    uint8_t* bufferForStrideMax = bufferForStride + buffer->GetSuffaceBufferSize();
+    auto dstAVFmt = ConvertOhosFormat2AVPixelFormat(buffer->GetFormat());
+    if (dstAVFmt == AV_PIX_FMT_NV21 || dstAVFmt == AV_PIX_FMT_NV12) {
+        AddStrideToNV21(bufferForStride, bufferForStrideMax,
+            buffer->GetWidth(), buffer->GetHeight(), buffer->GetStride());
+    } else {
+        CAMERA_LOGE("format not supported for stride, format = %{public}d", buffer->GetFormat());
+    }
+}
 };
\ No newline at end of file
diff --git a/camera/vdi_base/usb_camera/pipeline_core/src/node/node_utils.h b/camera/vdi_base/usb_camera/pipeline_core/src/node/node_utils.h
index b0d9e90509b77110d9f3978d80dd54ad79825310..963149de35925e624437f7a1815a39a269b91cca 100644
--- a/camera/vdi_base/usb_camera/pipeline_core/src/node/node_utils.h
+++ b/camera/vdi_base/usb_camera/pipeline_core/src/node/node_utils.h
@@ -23,6 +23,7 @@ namespace OHOS::Camera {
     static int32_t ImageFormatConvert(ImageBufferInfo &srcBufferInfo, ImageBufferInfo &dstBufferInfo);
     static void BufferScaleFormatTransform(std::shared_ptr<IBuffer>& buffer, void *dstBuffer = nullptr,
         uint32_t dstBufferSize = 0);
+    static void BufferTransformForStride(std::shared_ptr<IBuffer>& buffer);
 
     struct ImageBufferInfo {
         int32_t width;
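// Worked example, not part of this change set: for an NV21 buffer of 640 x 480 expanded in place to
// a stride of 704 by AddStrideToNV21() above:
//   packed Y plane   = 640 * 480       = 307200 bytes
//   packed UV plane  = 307200 / 2      = 153600 bytes
//   strided Y plane  = 704 * 480       = 337920 bytes
//   strided UV plane = 704 * (480 / 2) = 168960 bytes (506880 bytes total must fit in the buffer)
// Rows are moved from the last row to the first so a destination row only overwrites rows that have
// already been copied, and the UV plane is moved before the Y plane because the expanded Y plane
// grows into the region where the packed UV data originally lives. The 64 padding bytes per row
// (704 - 640) are then zero-filled.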
diff --git a/camera/vdi_base/usb_camera/pipeline_core/src/node/scale_node.cpp b/camera/vdi_base/usb_camera/pipeline_core/src/node/scale_node.cpp
index 4a71da077c946eb6bfe0602b5083e4f6f1f044b7..278e8118bcaef1b9eaa9ddbcf0637b14f4b477da 100644
--- a/camera/vdi_base/usb_camera/pipeline_core/src/node/scale_node.cpp
+++ b/camera/vdi_base/usb_camera/pipeline_core/src/node/scale_node.cpp
@@ -55,6 +55,9 @@ void ScaleNode::DeliverBuffer(std::shared_ptr<IBuffer>& buffer)
         CAMERA_LOGE("ScaleNode::DeliverBuffer frameSpec is null");
         return;
     }
+    if (buffer->GetCurFormat() == CAMERA_FORMAT_BLOB) {
+        return NodeBase::DeliverBuffer(buffer);
+    }
 
     if (buffer->GetBufferStatus() != CAMERA_BUFFER_STATUS_OK) {
         CAMERA_LOGE("BufferStatus() != CAMERA_BUFFER_STATUS_OK");
diff --git a/camera/vdi_base/v4l2/src/stream_operator/stream_tunnel/standard/stream_tunnel.cpp b/camera/vdi_base/v4l2/src/stream_operator/stream_tunnel/standard/stream_tunnel.cpp
index 9b2482bcd363e925ff2694127669dd8762be7c24..9045b3fe43576613caadb6d9b6367fc0ebf8076a 100644
--- a/camera/vdi_base/v4l2/src/stream_operator/stream_tunnel/standard/stream_tunnel.cpp
+++ b/camera/vdi_base/v4l2/src/stream_operator/stream_tunnel/standard/stream_tunnel.cpp
@@ -60,20 +60,23 @@ void StreamTunnel::CleanBuffers()
     index = -1;
 }
 
+const int32_t BLOB_MAX_SIZE = 24 * 1024 * 1024;
+
 std::shared_ptr<IBuffer> StreamTunnel::GetBuffer()
 {
     CHECK_IF_PTR_NULL_RETURN_VALUE(bufferQueue_, nullptr);
     OHOS::sptr<OHOS::SurfaceBuffer> sb = nullptr;
     int32_t fence = 0;
-    constexpr int32_t SLEEP_TIME = 2000;
+    constexpr int32_t sleepTime = 2000;
     int32_t timtCount = 0;
     OHOS::SurfaceError sfError = OHOS::SURFACE_ERROR_OK;
+    auto tmpConfig = requestConfig_;
     do {
-        sfError = bufferQueue_->RequestBuffer(sb, fence, requestConfig_);
+        sfError = bufferQueue_->RequestBuffer(sb, fence, tmpConfig);
         if (sfError == OHOS::SURFACE_ERROR_NO_BUFFER) {
             std::unique_lock<std::mutex> l(waitLock_);
             waitCV_.wait(l, [this] { return wakeup_ == true; });
-            usleep(SLEEP_TIME);
+            usleep(sleepTime);
             timtCount++;
         }
         if (fence != -1) {
@@ -83,7 +86,7 @@ std::shared_ptr<IBuffer> StreamTunnel::GetBuffer()
     } while (!stop_ && sfError == OHOS::SURFACE_ERROR_NO_BUFFER);
     wakeup_ = false;
     CAMERA_LOGI("bufferQueue_->RequestBuffer Done, sfError = %{public}d, cast time = %{public}d us",
-        sfError, timtCount * SLEEP_TIME);
+        sfError, timtCount * sleepTime);
 
     if (stop_) {
         if (sb != nullptr) {
@@ -125,8 +128,9 @@ static void PrepareBufferBeforeFlush(const std::shared_ptr<IBuffer>& buffer, con
         }
     }
     if (!buffer->GetIsValidDataInSurfaceBuffer()) {
+        uint32_t availableSize = esInfo.size > 0 ? esInfo.size : buffer->GetSize();
         CAMERA_LOGI("copy data from cb to sb, size = %{public}d", sb->GetSize());
-        auto ret = memcpy_s(sb->GetVirAddr(), sb->GetSize(), buffer->GetVirAddress(), sb->GetSize());
+        auto ret = memcpy_s(sb->GetVirAddr(), sb->GetSize(), buffer->GetVirAddress(), availableSize);
         if (ret != 0) {
             CAMERA_LOGE("PrepareBufferBeforeFlush memcpy_s fail, error = %{public}d", ret);
         }