[libcamera-devel,v4,12/11] libcamera: pipeline: simple: Support multiple capture video nodes

Message ID 20200404011624.19728-1-laurent.pinchart@ideasonboard.com
State Superseded
Series
  • Simple pipeline handler

Commit Message

Laurent Pinchart April 4, 2020, 1:16 a.m. UTC
The simple pipeline handler rejects devices that have multiple capture
video nodes. There's no real reason to do so; a more dynamic approach is
possible, as the pipeline handler already locates the video device by
walking the media graph.

Rework the match sequence by skipping any check on the video nodes, and
create the V4L2VideoDevice for the media entity at the end of the
pipeline when initializing the camera data. The V4L2VideoDevice
instances are managed by the pipeline handler itself, to avoid creating
separate instances in the camera data if multiple sensors are routed to
the same video device.

Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
---
 src/libcamera/pipeline/simple/simple.cpp | 136 +++++++++++++----------
 1 file changed, 75 insertions(+), 61 deletions(-)

Andrey, does this fix your issue with the qcom-camss?
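
As background for the diff below, here is a minimal, self-contained sketch of
the lookup-or-create cache this patch introduces, keyed by the capture entity.
The Entity and VideoDevice types are simplified stand-ins rather than the real
libcamera MediaEntity and V4L2VideoDevice classes; only the ownership and
sharing pattern mirrors the patch.

#include <iostream>
#include <map>
#include <memory>
#include <string>

struct Entity {
	std::string name;
};

class VideoDevice {
public:
	explicit VideoDevice(const Entity *entity) : entity_(entity) {}
	/* Stand-in for opening the V4L2 video node; always succeeds here. */
	bool open() { return true; }
	const Entity *entity() const { return entity_; }

private:
	const Entity *entity_;
};

class Handler {
public:
	/* Return the cached device for the entity, creating it on first use. */
	VideoDevice *video(const Entity *entity)
	{
		auto iter = videos_.find(entity);
		if (iter != videos_.end())
			return iter->second.get();

		auto video = std::make_unique<VideoDevice>(entity);
		if (!video->open())
			return nullptr;

		auto element = videos_.emplace(entity, std::move(video));
		return element.first->second.get();
	}

private:
	/* One device per capture entity, owned by the handler, not the cameras. */
	std::map<const Entity *, std::unique_ptr<VideoDevice>> videos_;
};

int main()
{
	Handler handler;
	Entity capture{ "capture0" };

	/* Two sensors routed to the same capture entity share one instance. */
	VideoDevice *front = handler.video(&capture);
	VideoDevice *back = handler.video(&capture);
	std::cout << "same instance: " << std::boolalpha
		  << (front == back) << std::endl;

	return 0;
}

In the patch itself this role is played by SimplePipelineHandler::video(), and
each SimpleCameraData only keeps a borrowed video_ pointer to the device owned
by the handler.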

Comments

Andrey Konovalov April 8, 2020, 5:06 p.m. UTC | #1
Hi Laurent,

Thank you for the patch!

On 04.04.2020 04:16, Laurent Pinchart wrote:
> The simple pipeline handler rejects devices that have multiple capture
> video nodes. There's no real reason to do so; a more dynamic approach is
> possible, as the pipeline handler already locates the video device by
> walking the media graph.
> 
> Rework the match sequence by skipping any check on the video nodes, and
> create the V4L2VideoDevice for the media entity at the end of the
> pipeline when initializing the camera data. The V4L2VideoDevice
> instances are managed by the pipeline handler itself, to avoid creating
> separate instances in the camera data if multiple sensors are routed to
> the same video device.
> 
> Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
> ---
>   src/libcamera/pipeline/simple/simple.cpp | 136 +++++++++++++----------
>   1 file changed, 75 insertions(+), 61 deletions(-)
> 
> Andrey, does this fix your issue with the qcom-camss?

Yes, it does. Thanks!

Reviewed-by: Andrey Konovalov <andrey.konovalov@linaro.org>

Niklas Söderlund April 21, 2020, 8:40 p.m. UTC | #2
Hi Laurent,

Thanks for your work.

On 2020-04-04 04:16:24 +0300, Laurent Pinchart wrote:
> The simple pipeline handler rejects devices that have multiple capture
> video nodes. There's no real reason to do so; a more dynamic approach is
> possible, as the pipeline handler already locates the video device by
> walking the media graph.
> 
> Rework the match sequence by skipping any check on the video nodes, and
> create the V4L2VideoDevice for the media entity at the end of the
> pipeline when initializing the camera data. The V4L2VideoDevice
> instances are managed by the pipeline handler itself, to avoid creating
> separate instances in the camera data if multiple sensors are routed to
> the same video device.
> 
> Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>

I think some of this could be merged with previous patches in this 
series, but no biggie. Whether you keep it as is or merge it,

Reviewed-by: Niklas Söderlund <niklas.soderlund@ragnatech.se>

Patch

diff --git a/src/libcamera/pipeline/simple/simple.cpp b/src/libcamera/pipeline/simple/simple.cpp
index 8e7dd091b4ab..b5f9177dd383 100644
--- a/src/libcamera/pipeline/simple/simple.cpp
+++ b/src/libcamera/pipeline/simple/simple.cpp
@@ -57,8 +57,7 @@  static const SimplePipelineInfo supportedDevices[] = {
 class SimpleCameraData : public CameraData
 {
 public:
-	SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
-			 MediaEntity *video);
+	SimpleCameraData(SimplePipelineHandler *pipe, MediaEntity *sensor);
 
 	bool isValid() const { return sensor_ != nullptr; }
 	std::set<Stream *> streams() { return { &stream_ }; }
@@ -82,6 +81,7 @@  public:
 	Stream stream_;
 	std::unique_ptr<CameraSensor> sensor_;
 	std::list<Entity> entities_;
+	V4L2VideoDevice *video_;
 
 	std::vector<Configuration> configs_;
 	std::map<PixelFormat, Configuration> formats_;
@@ -129,7 +129,7 @@  public:
 
 	bool match(DeviceEnumerator *enumerator) override;
 
-	V4L2VideoDevice *video() { return video_; }
+	V4L2VideoDevice *video(const MediaEntity *entity);
 	V4L2Subdevice *subdev(const MediaEntity *entity);
 	SimpleConverter *converter() { return converter_; }
 
@@ -151,7 +151,7 @@  private:
 	void converterDone(FrameBuffer *input, FrameBuffer *output);
 
 	MediaDevice *media_;
-	V4L2VideoDevice *video_;
+	std::map<const MediaEntity *, std::unique_ptr<V4L2VideoDevice>> videos_;
 	std::map<const MediaEntity *, V4L2Subdevice> subdevs_;
 
 	SimpleConverter *converter_;
@@ -166,8 +166,8 @@  private:
  * Camera Data
  */
 
-SimpleCameraData::SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
-				   MediaEntity *video)
+SimpleCameraData::SimpleCameraData(SimplePipelineHandler *pipe,
+				   MediaEntity *sensor)
 	: CameraData(pipe)
 {
 	int ret;
@@ -179,8 +179,8 @@  SimpleCameraData::SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
 	MediaEntity *source = sensor;
 
 	while (source) {
-		/* If we have reached the video node, we're done. */
-		if (source == video)
+		/* If we have reached a video node, we're done. */
+		if (source->function() == MEDIA_ENT_F_IO_V4L)
 			break;
 
 		/* Use the first output pad that has links. */
@@ -224,7 +224,14 @@  SimpleCameraData::SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
 		}
 	}
 
-	/* We have a valid pipeline, create the camera sensor. */
+	/*
+	 * We have a valid pipeline, get the video device and create the camera
+	 * sensor.
+	 */
+	video_ = pipe->video(source);
+	if (!video_)
+		return;
+
 	sensor_ = std::make_unique<CameraSensor>(sensor);
 	ret = sensor_->init();
 	if (ret) {
@@ -236,7 +243,6 @@  SimpleCameraData::SimpleCameraData(PipelineHandler *pipe, MediaEntity *sensor,
 int SimpleCameraData::init()
 {
 	SimplePipelineHandler *pipe = static_cast<SimplePipelineHandler *>(pipe_);
-	V4L2VideoDevice *video = pipe->video();
 	SimpleConverter *converter = pipe->converter();
 	int ret;
 
@@ -266,7 +272,7 @@  int SimpleCameraData::init()
 		}
 
 		std::map<V4L2PixelFormat, std::vector<SizeRange>> videoFormats =
-			video->formats(format.mbus_code);
+			video_->formats(format.mbus_code);
 
 		LOG(SimplePipeline, Debug)
 			<< "Adding configuration for " << format.size.toString()
@@ -285,7 +291,7 @@  int SimpleCameraData::init()
 		 * PixelFormat is achieved.
 		 */
 		for (const auto &videoFormat : videoFormats) {
-			PixelFormat pixelFormat = video->toPixelFormat(videoFormat.first);
+			PixelFormat pixelFormat = video_->toPixelFormat(videoFormat.first);
 			if (!pixelFormat)
 				continue;
 
@@ -451,13 +457,12 @@  CameraConfiguration::Status SimpleCameraConfiguration::validate()
  */
 
 SimplePipelineHandler::SimplePipelineHandler(CameraManager *manager)
-	: PipelineHandler(manager), video_(nullptr), converter_(nullptr)
+	: PipelineHandler(manager), converter_(nullptr)
 {
 }
 
 SimplePipelineHandler::~SimplePipelineHandler()
 {
-	delete video_;
 	delete converter_;
 }
 
@@ -503,6 +508,7 @@  int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
 	SimpleCameraConfiguration *config =
 		static_cast<SimpleCameraConfiguration *>(c);
 	SimpleCameraData *data = cameraData(camera);
+	V4L2VideoDevice *video = data->video_;
 	StreamConfiguration &cfg = config->at(0);
 	int ret;
 
@@ -524,13 +530,13 @@  int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
 		return ret;
 
 	/* Configure the video node. */
-	V4L2PixelFormat videoFormat = video_->toV4L2PixelFormat(pipeConfig.pixelFormat);
+	V4L2PixelFormat videoFormat = video->toV4L2PixelFormat(pipeConfig.pixelFormat);
 
 	V4L2DeviceFormat captureFormat = {};
 	captureFormat.fourcc = videoFormat;
 	captureFormat.size = cfg.size;
 
-	ret = video_->setFormat(&captureFormat);
+	ret = video->setFormat(&captureFormat);
 	if (ret)
 		return ret;
 
@@ -563,6 +569,7 @@  int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
 int SimplePipelineHandler::exportFrameBuffers(Camera *camera, Stream *stream,
 					      std::vector<std::unique_ptr<FrameBuffer>> *buffers)
 {
+	SimpleCameraData *data = cameraData(camera);
 	unsigned int count = stream->configuration().bufferCount;
 
 	/*
@@ -572,23 +579,24 @@  int SimplePipelineHandler::exportFrameBuffers(Camera *camera, Stream *stream,
 	if (useConverter_)
 		return converter_->exportBuffers(count, buffers);
 	else
-		return video_->exportBuffers(count, buffers);
+		return data->video_->exportBuffers(count, buffers);
 }
 
 int SimplePipelineHandler::start(Camera *camera)
 {
 	SimpleCameraData *data = cameraData(camera);
+	V4L2VideoDevice *video = data->video_;
 	unsigned int count = data->stream_.configuration().bufferCount;
 	int ret;
 
 	if (useConverter_)
-		ret = video_->allocateBuffers(count, &converterBuffers_);
+		ret = video->allocateBuffers(count, &converterBuffers_);
 	else
-		ret = video_->importBuffers(count);
+		ret = video->importBuffers(count);
 	if (ret < 0)
 		return ret;
 
-	ret = video_->streamOn();
+	ret = video->streamOn();
 	if (ret < 0) {
 		stop(camera);
 		return ret;
@@ -603,7 +611,7 @@  int SimplePipelineHandler::start(Camera *camera)
 
 		/* Queue all internal buffers for capture. */
 		for (std::unique_ptr<FrameBuffer> &buffer : converterBuffers_)
-			video_->queueBuffer(buffer.get());
+			video->queueBuffer(buffer.get());
 	}
 
 	activeCamera_ = camera;
@@ -613,11 +621,14 @@  int SimplePipelineHandler::start(Camera *camera)
 
 void SimplePipelineHandler::stop(Camera *camera)
 {
+	SimpleCameraData *data = cameraData(camera);
+	V4L2VideoDevice *video = data->video_;
+
 	if (useConverter_)
 		converter_->stop();
 
-	video_->streamOff();
-	video_->releaseBuffers();
+	video->streamOff();
+	video->releaseBuffers();
 
 	converterBuffers_.clear();
 	activeCamera_ = nullptr;
@@ -644,7 +655,7 @@  int SimplePipelineHandler::queueRequestDevice(Camera *camera, Request *request)
 		return 0;
 	}
 
-	return video_->queueBuffer(buffer);
+	return data->video_->queueBuffer(buffer);
 }
 
 /* -----------------------------------------------------------------------------
@@ -672,12 +683,8 @@  bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
 	if (!media_)
 		return false;
 
-	/*
-	 * Locate sensors and video nodes. We only support pipelines with at
-	 * least one sensor and exactly one video capture node.
-	 */
+	/* Locate the sensors. */
 	std::vector<MediaEntity *> sensors;
-	std::vector<MediaEntity *> videos;
 
 	for (MediaEntity *entity : media_->entities()) {
 		switch (entity->function()) {
@@ -685,12 +692,6 @@  bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
 			sensors.push_back(entity);
 			break;
 
-		case MEDIA_ENT_F_IO_V4L:
-			if (entity->pads().size() == 1 &&
-			    (entity->pads()[0]->flags() & MEDIA_PAD_FL_SINK))
-				videos.push_back(entity);
-			break;
-
 		default:
 			break;
 		}
@@ -701,26 +702,6 @@  bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
 		return false;
 	}
 
-	if (videos.size() != 1) {
-		LOG(SimplePipeline, Error)
-			<< "Pipeline with " << videos.size()
-			<< " video capture nodes is not supported";
-		return false;
-	}
-
-	/* Locate and open the capture video node. */
-	video_ = new V4L2VideoDevice(videos[0]);
-	if (video_->open() < 0)
-		return false;
-
-	if (video_->caps().isMultiplanar()) {
-		LOG(SimplePipeline, Error)
-			<< "V4L2 multiplanar devices are not supported";
-		return false;
-	}
-
-	video_->bufferReady.connect(this, &SimplePipelineHandler::bufferReady);
-
 	/* Open the converter, if any. */
 	if (converter) {
 		converter_ = new SimpleConverter(converter);
@@ -745,8 +726,7 @@  bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
 
 	for (MediaEntity *sensor : sensors) {
 		std::unique_ptr<SimpleCameraData> data =
-			std::make_unique<SimpleCameraData>(this, sensor,
-							   videos[0]);
+			std::make_unique<SimpleCameraData>(this, sensor);
 		if (!data->isValid()) {
 			LOG(SimplePipeline, Error)
 				<< "No valid pipeline for sensor '"
@@ -793,6 +773,35 @@  bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
 	return true;
 }
 
+V4L2VideoDevice *SimplePipelineHandler::video(const MediaEntity *entity)
+{
+	/*
+	 * Return the V4L2VideoDevice corresponding to the media entity, either
+	 * as a previously constructed device if available from the cache, or
+	 * by constructing a new one.
+	 */
+
+	auto iter = videos_.find(entity);
+	if (iter != videos_.end())
+		return iter->second.get();
+
+	std::unique_ptr<V4L2VideoDevice> video =
+		std::make_unique<V4L2VideoDevice>(entity);
+	if (video->open() < 0)
+		return nullptr;
+
+	if (video->caps().isMultiplanar()) {
+		LOG(SimplePipeline, Error)
+			<< "V4L2 multiplanar devices are not supported";
+		return nullptr;
+	}
+
+	video->bufferReady.connect(this, &SimplePipelineHandler::bufferReady);
+
+	auto element = videos_.emplace(entity, std::move(video));
+	return element.first->second.get();
+}
+
 V4L2Subdevice *SimplePipelineHandler::subdev(const MediaEntity *entity)
 {
 	auto iter = subdevs_.find(entity);
@@ -808,6 +817,9 @@  V4L2Subdevice *SimplePipelineHandler::subdev(const MediaEntity *entity)
 
 void SimplePipelineHandler::bufferReady(FrameBuffer *buffer)
 {
+	ASSERT(activeCamera_);
+	SimpleCameraData *data = cameraData(activeCamera_);
+
 	/*
 	 * If an error occurred during capture, or if the buffer was cancelled,
 	 * complete the request, even if the converter is in use as there's no
@@ -816,7 +828,7 @@  void SimplePipelineHandler::bufferReady(FrameBuffer *buffer)
 	if (buffer->metadata().status != FrameMetadata::FrameSuccess) {
 		if (useConverter_) {
 			/* Requeue the buffer for capture. */
-			video_->queueBuffer(buffer);
+			data->video_->queueBuffer(buffer);
 
 			/*
 			 * Get the next user-facing buffer to complete the
@@ -842,7 +854,7 @@  void SimplePipelineHandler::bufferReady(FrameBuffer *buffer)
 	 */
 	if (useConverter_) {
 		if (converterQueue_.empty()) {
-			video_->queueBuffer(buffer);
+			data->video_->queueBuffer(buffer);
 			return;
 		}
 
@@ -862,14 +874,16 @@  void SimplePipelineHandler::bufferReady(FrameBuffer *buffer)
 void SimplePipelineHandler::converterDone(FrameBuffer *input,
 					  FrameBuffer *output)
 {
-	/* Complete the request. */
 	ASSERT(activeCamera_);
+	SimpleCameraData *data = cameraData(activeCamera_);
+
+	/* Complete the request. */
 	Request *request = output->request();
 	completeBuffer(activeCamera_, request, output);
 	completeRequest(activeCamera_, request);
 
 	/* Queue the input buffer back for capture. */
-	video_->queueBuffer(input);
+	data->video_->queueBuffer(input);
 }
 
 REGISTER_PIPELINE_HANDLER(SimplePipelineHandler);