From: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
To: libcamera-devel@lists.libcamera.org
Date: Sat, 4 Apr 2020 04:16:24 +0300
Message-Id: <20200404011624.19728-1-laurent.pinchart@ideasonboard.com>
In-Reply-To: <20200404004438.17992-1-laurent.pinchart@ideasonboard.com>
References: <20200404004438.17992-1-laurent.pinchart@ideasonboard.com>
Subject: [libcamera-devel] [PATCH v4 12/11] libcamera: pipeline: simple: Support multiple capture video nodes

The simple pipeline handler rejects devices that have multiple capture
video nodes. There's no real reason to do so: a more dynamic approach is
possible, as the pipeline handler already locates the video device by
walking the media graph.

Rework the match sequence by skipping any check on the video nodes, and
create the V4L2VideoDevice for the media entity at the end of the
pipeline when initializing the camera data. The V4L2VideoDevice
instances are managed by the pipeline handler itself, to avoid creating
separate instances in the camera data if multiple sensors are routed to
the same video device.

Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Reviewed-by: Andrey Konovalov
Reviewed-by: Niklas Söderlund
---
 src/libcamera/pipeline/simple/simple.cpp | 136 +++++++++++++----
 1 file changed, 75 insertions(+), 61 deletions(-)

Andrey, does this fix your issue with the qcom-camss?
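For readers skimming the series, the heart of the change is that the pipeline
handler now owns one V4L2VideoDevice per capture entity and hands out cached
instances on demand. Below is a minimal, standalone sketch of that caching
pattern; the types Entity, VideoDevice and Handler are hypothetical stand-ins
for the real libcamera classes, not the actual API.

#include <iostream>
#include <map>
#include <memory>
#include <string>

/* Hypothetical stand-ins for MediaEntity and V4L2VideoDevice. */
struct Entity {
        std::string name;
};

struct VideoDevice {
        explicit VideoDevice(const Entity *entity) : entity(entity) {}
        const Entity *entity;
};

class Handler {
public:
        /*
         * Return the device for an entity, creating it on first use and
         * returning the cached instance afterwards, so that two sensors
         * routed to the same capture entity share a single device.
         */
        VideoDevice *video(const Entity *entity)
        {
                auto iter = videos_.find(entity);
                if (iter != videos_.end())
                        return iter->second.get();

                /*
                 * A real implementation would open the device here and
                 * return nullptr on failure.
                 */
                auto element = videos_.emplace(entity,
                                               std::make_unique<VideoDevice>(entity));
                return element.first->second.get();
        }

private:
        std::map<const Entity *, std::unique_ptr<VideoDevice>> videos_;
};

int main()
{
        Entity capture{ "capture0" };
        Handler handler;

        /* Both calls return the same cached instance, so this prints 1. */
        std::cout << (handler.video(&capture) == handler.video(&capture))
                  << std::endl;
        return 0;
}

The patch applies the same idea with MediaEntity and V4L2VideoDevice, plus the
open() and multiplanar checks shown in the diff.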
diff --git a/src/libcamera/pipeline/simple/simple.cpp b/src/libcamera/pipeline/simple/simple.cpp
index 8e7dd091b4ab..b5f9177dd383 100644
--- a/src/libcamera/pipeline/simple/simple.cpp
+++ b/src/libcamera/pipeline/simple/simple.cpp
@@ -57,8 +57,7 @@ static const SimplePipelineInfo supportedDevices[] = {
 class SimpleCameraData : public CameraData
 {
 public:
-        SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
-                         MediaEntity *video);
+        SimpleCameraData(SimplePipelineHandler *pipe, MediaEntity *sensor);
 
         bool isValid() const { return sensor_ != nullptr; }
         std::set<Stream *> streams() { return { &stream_ }; }
@@ -82,6 +81,7 @@ public:
         Stream stream_;
         std::unique_ptr<CameraSensor> sensor_;
         std::list<Entity> entities_;
+        V4L2VideoDevice *video_;
 
         std::vector<Configuration> configs_;
         std::map<PixelFormat, Configuration> formats_;
@@ -129,7 +129,7 @@ public:
 
         bool match(DeviceEnumerator *enumerator) override;
 
-        V4L2VideoDevice *video() { return video_; }
+        V4L2VideoDevice *video(const MediaEntity *entity);
         V4L2Subdevice *subdev(const MediaEntity *entity);
         SimpleConverter *converter() { return converter_; }
 
@@ -151,7 +151,7 @@ private:
         void converterDone(FrameBuffer *input, FrameBuffer *output);
 
         MediaDevice *media_;
-        V4L2VideoDevice *video_;
+        std::map<const MediaEntity *, std::unique_ptr<V4L2VideoDevice>> videos_;
         std::map<const MediaEntity *, V4L2Subdevice> subdevs_;
 
         SimpleConverter *converter_;
@@ -166,8 +166,8 @@ private:
  * Camera Data
  */
 
-SimpleCameraData::SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
-                                   MediaEntity *video)
+SimpleCameraData::SimpleCameraData(SimplePipelineHandler *pipe,
+                                   MediaEntity *sensor)
         : CameraData(pipe)
 {
         int ret;
@@ -179,8 +179,8 @@ SimpleCameraData::SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
         MediaEntity *source = sensor;
 
         while (source) {
-                /* If we have reached the video node, we're done. */
-                if (source == video)
+                /* If we have reached a video node, we're done. */
+                if (source->function() == MEDIA_ENT_F_IO_V4L)
                         break;
 
                 /* Use the first output pad that has links. */
@@ -224,7 +224,14 @@ SimpleCameraData::SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
                 }
         }
 
-        /* We have a valid pipeline, create the camera sensor. */
+        /*
+         * We have a valid pipeline, get the video device and create the camera
+         * sensor.
+         */
+        video_ = pipe->video(source);
+        if (!video_)
+                return;
+
         sensor_ = std::make_unique<CameraSensor>(sensor);
         ret = sensor_->init();
         if (ret) {
@@ -236,7 +243,6 @@ SimpleCameraData::SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
 int SimpleCameraData::init()
 {
         SimplePipelineHandler *pipe = static_cast<SimplePipelineHandler *>(pipe_);
-        V4L2VideoDevice *video = pipe->video();
         SimpleConverter *converter = pipe->converter();
         int ret;
 
@@ -266,7 +272,7 @@ int SimpleCameraData::init()
                 }
 
                 std::map<V4L2PixelFormat, std::vector<SizeRange>> videoFormats =
-                        video->formats(format.mbus_code);
+                        video_->formats(format.mbus_code);
 
                 LOG(SimplePipeline, Debug)
                         << "Adding configuration for " << format.size.toString()
@@ -285,7 +291,7 @@ int SimpleCameraData::init()
                  * PixelFormat is achieved.
                  */
                 for (const auto &videoFormat : videoFormats) {
-                        PixelFormat pixelFormat = video->toPixelFormat(videoFormat.first);
+                        PixelFormat pixelFormat = video_->toPixelFormat(videoFormat.first);
                         if (!pixelFormat)
                                 continue;
 
@@ -451,13 +457,12 @@ CameraConfiguration::Status SimpleCameraConfiguration::validate()
  */
 
 SimplePipelineHandler::SimplePipelineHandler(CameraManager *manager)
-        : PipelineHandler(manager), video_(nullptr), converter_(nullptr)
+        : PipelineHandler(manager), converter_(nullptr)
 {
 }
 
 SimplePipelineHandler::~SimplePipelineHandler()
 {
-        delete video_;
         delete converter_;
 }
 
@@ -503,6 +508,7 @@ int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
         SimpleCameraConfiguration *config =
                 static_cast<SimpleCameraConfiguration *>(c);
         SimpleCameraData *data = cameraData(camera);
+        V4L2VideoDevice *video = data->video_;
         StreamConfiguration &cfg = config->at(0);
         int ret;
 
@@ -524,13 +530,13 @@ int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
                 return ret;
 
         /* Configure the video node. */
-        V4L2PixelFormat videoFormat = video_->toV4L2PixelFormat(pipeConfig.pixelFormat);
+        V4L2PixelFormat videoFormat = video->toV4L2PixelFormat(pipeConfig.pixelFormat);
 
         V4L2DeviceFormat captureFormat = {};
         captureFormat.fourcc = videoFormat;
         captureFormat.size = cfg.size;
 
-        ret = video_->setFormat(&captureFormat);
+        ret = video->setFormat(&captureFormat);
         if (ret)
                 return ret;
 
@@ -563,6 +569,7 @@ int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
 int SimplePipelineHandler::exportFrameBuffers(Camera *camera, Stream *stream,
                                               std::vector<std::unique_ptr<FrameBuffer>> *buffers)
 {
+        SimpleCameraData *data = cameraData(camera);
         unsigned int count = stream->configuration().bufferCount;
 
         /*
@@ -572,23 +579,24 @@ int SimplePipelineHandler::exportFrameBuffers(Camera *camera, Stream *stream,
         if (useConverter_)
                 return converter_->exportBuffers(count, buffers);
         else
-                return video_->exportBuffers(count, buffers);
+                return data->video_->exportBuffers(count, buffers);
 }
 
 int SimplePipelineHandler::start(Camera *camera)
 {
         SimpleCameraData *data = cameraData(camera);
+        V4L2VideoDevice *video = data->video_;
         unsigned int count = data->stream_.configuration().bufferCount;
         int ret;
 
         if (useConverter_)
-                ret = video_->allocateBuffers(count, &converterBuffers_);
+                ret = video->allocateBuffers(count, &converterBuffers_);
         else
-                ret = video_->importBuffers(count);
+                ret = video->importBuffers(count);
         if (ret < 0)
                 return ret;
 
-        ret = video_->streamOn();
+        ret = video->streamOn();
         if (ret < 0) {
                 stop(camera);
                 return ret;
@@ -603,7 +611,7 @@ int SimplePipelineHandler::start(Camera *camera)
 
                 /* Queue all internal buffers for capture. */
                 for (std::unique_ptr<FrameBuffer> &buffer : converterBuffers_)
-                        video_->queueBuffer(buffer.get());
+                        video->queueBuffer(buffer.get());
         }
 
         activeCamera_ = camera;
@@ -613,11 +621,14 @@ int SimplePipelineHandler::start(Camera *camera)
 
 void SimplePipelineHandler::stop(Camera *camera)
 {
+        SimpleCameraData *data = cameraData(camera);
+        V4L2VideoDevice *video = data->video_;
+
         if (useConverter_)
                 converter_->stop();
 
-        video_->streamOff();
-        video_->releaseBuffers();
+        video->streamOff();
+        video->releaseBuffers();
 
         converterBuffers_.clear();
         activeCamera_ = nullptr;
@@ -644,7 +655,7 @@ int SimplePipelineHandler::queueRequestDevice(Camera *camera, Request *request)
                 return 0;
         }
 
-        return video_->queueBuffer(buffer);
+        return data->video_->queueBuffer(buffer);
 }
 
 /* -----------------------------------------------------------------------------
@@ -672,12 +683,8 @@ bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
         if (!media_)
                 return false;
 
-        /*
-         * Locate sensors and video nodes. We only support pipelines with at
-         * least one sensor and exactly one video capture node.
-         */
+        /* Locate the sensors. */
         std::vector<MediaEntity *> sensors;
-        std::vector<MediaEntity *> videos;
 
         for (MediaEntity *entity : media_->entities()) {
                 switch (entity->function()) {
@@ -685,12 +692,6 @@ bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
                         sensors.push_back(entity);
                         break;
 
-                case MEDIA_ENT_F_IO_V4L:
-                        if (entity->pads().size() == 1 &&
-                            (entity->pads()[0]->flags() & MEDIA_PAD_FL_SINK))
-                                videos.push_back(entity);
-                        break;
-
                 default:
                         break;
                 }
@@ -701,26 +702,6 @@ bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
                 return false;
         }
 
-        if (videos.size() != 1) {
-                LOG(SimplePipeline, Error)
-                        << "Pipeline with " << videos.size()
-                        << " video capture nodes is not supported";
-                return false;
-        }
-
-        /* Locate and open the capture video node. */
-        video_ = new V4L2VideoDevice(videos[0]);
-        if (video_->open() < 0)
-                return false;
-
-        if (video_->caps().isMultiplanar()) {
-                LOG(SimplePipeline, Error)
-                        << "V4L2 multiplanar devices are not supported";
-                return false;
-        }
-
-        video_->bufferReady.connect(this, &SimplePipelineHandler::bufferReady);
-
         /* Open the converter, if any. */
         if (converter) {
                 converter_ = new SimpleConverter(converter);
@@ -745,8 +726,7 @@ bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
 
         for (MediaEntity *sensor : sensors) {
                 std::unique_ptr<SimpleCameraData> data =
-                        std::make_unique<SimpleCameraData>(this, sensor,
-                                                           videos[0]);
+                        std::make_unique<SimpleCameraData>(this, sensor);
                 if (!data->isValid()) {
                         LOG(SimplePipeline, Error)
                                 << "No valid pipeline for sensor '"
@@ -793,6 +773,35 @@ bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
         return true;
 }
 
+V4L2VideoDevice *SimplePipelineHandler::video(const MediaEntity *entity)
+{
+        /*
+         * Return the V4L2VideoDevice corresponding to the media entity, either
+         * as a previously constructed device if available from the cache, or
+         * by constructing a new one.
+         */
+
+        auto iter = videos_.find(entity);
+        if (iter != videos_.end())
+                return iter->second.get();
+
+        std::unique_ptr<V4L2VideoDevice> video =
+                std::make_unique<V4L2VideoDevice>(entity);
+        if (video->open() < 0)
+                return nullptr;
+
+        if (video->caps().isMultiplanar()) {
+                LOG(SimplePipeline, Error)
+                        << "V4L2 multiplanar devices are not supported";
+                return nullptr;
+        }
+
+        video->bufferReady.connect(this, &SimplePipelineHandler::bufferReady);
+
+        auto element = videos_.emplace(entity, std::move(video));
+        return element.first->second.get();
+}
+
 V4L2Subdevice *SimplePipelineHandler::subdev(const MediaEntity *entity)
 {
         auto iter = subdevs_.find(entity);
@@ -808,6 +817,9 @@ V4L2Subdevice *SimplePipelineHandler::subdev(const MediaEntity *entity)
 
 void SimplePipelineHandler::bufferReady(FrameBuffer *buffer)
 {
+        ASSERT(activeCamera_);
+        SimpleCameraData *data = cameraData(activeCamera_);
+
         /*
          * If an error occurred during capture, or if the buffer was cancelled,
          * complete the request, even if the converter is in use as there's no
@@ -816,7 +828,7 @@ void SimplePipelineHandler::bufferReady(FrameBuffer *buffer)
         if (buffer->metadata().status != FrameMetadata::FrameSuccess) {
                 if (useConverter_) {
                         /* Requeue the buffer for capture. */
-                        video_->queueBuffer(buffer);
+                        data->video_->queueBuffer(buffer);
 
                         /*
                          * Get the next user-facing buffer to complete the
@@ -842,7 +854,7 @@ void SimplePipelineHandler::bufferReady(FrameBuffer *buffer)
          */
         if (useConverter_) {
                 if (converterQueue_.empty()) {
-                        video_->queueBuffer(buffer);
+                        data->video_->queueBuffer(buffer);
                         return;
                 }
 
@@ -862,14 +874,16 @@ void SimplePipelineHandler::bufferReady(FrameBuffer *buffer)
 void SimplePipelineHandler::converterDone(FrameBuffer *input,
                                           FrameBuffer *output)
 {
-        /* Complete the request. */
         ASSERT(activeCamera_);
+        SimpleCameraData *data = cameraData(activeCamera_);
+
+        /* Complete the request. */
         Request *request = output->request();
         completeBuffer(activeCamera_, request, output);
         completeRequest(activeCamera_, request);
 
         /* Queue the input buffer back for capture. */
-        video_->queueBuffer(input);
+        data->video_->queueBuffer(input);
 }
 
 REGISTER_PIPELINE_HANDLER(SimplePipelineHandler);
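As a quick way to see the effect on a device with several capture nodes, a
small test program along these lines should now list one camera per sensor,
even when several sensors feed the same media device. This is a rough sketch
against the application-facing API of this period; details such as
CameraManager construction and Camera::name() (later renamed to id()) may
differ between libcamera versions.

#include <iostream>
#include <memory>

#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>

int main()
{
        libcamera::CameraManager manager;
        if (manager.start())
                return 1;

        /* Every sensor matched by the simple pipeline handler shows up here. */
        for (const std::shared_ptr<libcamera::Camera> &camera : manager.cameras())
                std::cout << camera->name() << std::endl;

        manager.stop();
        return 0;
}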