[libcamera-devel,6/6,PoC/RFC] libcamera: pipeline: Add RaspberryPi handler

Message ID 20190808151221.24254-7-kieran.bingham@ideasonboard.com
State Superseded
Series
  • V4L2 M2M Support (+RPi PoC)

Commit Message

Kieran Bingham Aug. 8, 2019, 3:12 p.m. UTC
Utilise the CameraSensor class and construct a pipeline for a single
sensor on the Unicam, routed through the V4L2 Codec ISP interface.

Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
---
 src/libcamera/pipeline/meson.build     |   1 +
 src/libcamera/pipeline/raspberrypi.cpp | 425 +++++++++++++++++++++++++
 2 files changed, 426 insertions(+)
 create mode 100644 src/libcamera/pipeline/raspberrypi.cpp

Comments

Kieran Bingham Aug. 8, 2019, 3:58 p.m. UTC | #1
Just in case anyone actually tries this patch out: I've now updated my
kernel branch, which requires the following fixup to this patch to
function (or wait for a v2 for any testing):

diff --git a/src/libcamera/pipeline/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi.cpp
index 3d0b4453bfad..12512ebf62db 100644
--- a/src/libcamera/pipeline/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi.cpp
@@ -341,7 +341,7 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
        DeviceMatch codec("bcm2835-codec");

        /* We explicitly need the ISP device from the MMAL codec driver. */
-       codec.add("bcm2835-codec-isp");
+       codec.add("bcm2835-codec-isp-source");

        unicam_ = enumerator->search(unicam);
        if (!unicam_)
@@ -362,9 +362,11 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
                return false;

        /* Locate the ISP M2M node */
-       MediaEntity *isp = codec_->getEntityByName("bcm2835-codec-isp");
-       if (!isp)
+       MediaEntity *isp = codec_->getEntityByName("bcm2835-codec-isp-source");
+       if (!isp) {
+               LOG(RPI, Error) << "Could not identify the ISP";
                return false;
+       }

        data->isp_ = new V4L2M2MDevice(isp->deviceNode());
        if (data->isp_->status()) {


On 08/08/2019 16:12, Kieran Bingham wrote:
> Utilise the CameraSensor class and construct a pipeline for a single
> sensor on the Unicam, routed through the V4L2 Codec ISP interface.
> 
> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
> ---
>  src/libcamera/pipeline/meson.build     |   1 +
>  src/libcamera/pipeline/raspberrypi.cpp | 425 +++++++++++++++++++++++++
>  2 files changed, 426 insertions(+)
>  create mode 100644 src/libcamera/pipeline/raspberrypi.cpp
> 
> diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
> index 0d466225a72e..7ed7b26f3033 100644
> --- a/src/libcamera/pipeline/meson.build
> +++ b/src/libcamera/pipeline/meson.build
> @@ -1,4 +1,5 @@
>  libcamera_sources += files([
> +    'raspberrypi.cpp',
>      'uvcvideo.cpp',
>      'vimc.cpp',
>  ])
> diff --git a/src/libcamera/pipeline/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi.cpp
> new file mode 100644
> index 000000000000..4c4c3dea0113
> --- /dev/null
> +++ b/src/libcamera/pipeline/raspberrypi.cpp
> @@ -0,0 +1,425 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2019, Google Inc.
> + *
> + * raspberrypi.cpp - Pipeline handler for raspberrypi devices
> + */
> +
> +#include <libcamera/camera.h>
> +#include <libcamera/request.h>
> +#include <libcamera/stream.h>
> +
> +#include "camera_sensor.h"
> +#include "device_enumerator.h"
> +#include "log.h"
> +#include "media_device.h"
> +#include "pipeline_handler.h"
> +#include "utils.h"
> +#include "v4l2_controls.h"
> +#include "v4l2_videodevice.h"
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(RPI)
> +
> +class RPiCameraData : public CameraData
> +{
> +public:
> +	RPiCameraData(PipelineHandler *pipe)
> +		: CameraData(pipe), sensor_(nullptr), unicam_(nullptr),
> +		  isp_(nullptr)
> +	{
> +	}
> +
> +	~RPiCameraData()
> +	{
> +		bayerBuffers_.destroyBuffers();
> +		delete unicam_;
> +		delete isp_;
> +	}
> +
> +	void sensorReady(Buffer *buffer);
> +	void ispOutputReady(Buffer *buffer);
> +	void ispCaptureReady(Buffer *buffer);
> +
> +	CameraSensor *sensor_;
> +	V4L2VideoDevice *unicam_;
> +	V4L2M2MDevice *isp_;
> +	Stream stream_;
> +
> +	BufferPool bayerBuffers_;
> +	std::vector<std::unique_ptr<Buffer>> rawBuffers_;
> +};
> +
> +class RPiCameraConfiguration : public CameraConfiguration
> +{
> +public:
> +	RPiCameraConfiguration();
> +
> +	Status validate() override;
> +};
> +
> +class PipelineHandlerRPi : public PipelineHandler
> +{
> +public:
> +	PipelineHandlerRPi(CameraManager *manager);
> +	~PipelineHandlerRPi();
> +
> +	CameraConfiguration *
> +	generateConfiguration(Camera *camera,
> +			      const StreamRoles &roles) override;
> +	int configure(Camera *camera,
> +		      CameraConfiguration *config) override;
> +
> +	int allocateBuffers(Camera *camera,
> +			    const std::set<Stream *> &streams) override;
> +	int freeBuffers(Camera *camera,
> +			const std::set<Stream *> &streams) override;
> +
> +	int start(Camera *camera) override;
> +	void stop(Camera *camera) override;
> +
> +	int queueRequest(Camera *camera, Request *request) override;
> +
> +	bool match(DeviceEnumerator *enumerator) override;
> +
> +private:
> +	RPiCameraData *cameraData(const Camera *camera)
> +	{
> +		return static_cast<RPiCameraData *>(
> +			PipelineHandler::cameraData(camera));
> +	}
> +
> +	std::shared_ptr<MediaDevice> unicam_;
> +	std::shared_ptr<MediaDevice> codec_;
> +};
> +
> +RPiCameraConfiguration::RPiCameraConfiguration()
> +	: CameraConfiguration()
> +{
> +}
> +
> +CameraConfiguration::Status RPiCameraConfiguration::validate()
> +{
> +	Status status = Valid;
> +
> +	if (config_.empty())
> +		return Invalid;
> +
> +	/* Todo: Experiment with increased stream support through the ISP. */
> +	if (config_.size() > 1) {
> +		config_.resize(1);
> +		status = Adjusted;
> +	}
> +
> +	StreamConfiguration &cfg = config_[0];
> +
> +	/* Todo: restrict to hardware capabilities. */
> +
> +	cfg.bufferCount = 4;
> +
> +	return status;
> +}
> +
> +PipelineHandlerRPi::PipelineHandlerRPi(CameraManager *manager)
> +	: PipelineHandler(manager), unicam_(nullptr), codec_(nullptr)
> +{
> +}
> +
> +PipelineHandlerRPi::~PipelineHandlerRPi()
> +{
> +	if (unicam_)
> +		unicam_->release();
> +
> +	if (codec_)
> +		codec_->release();
> +}
> +
> +CameraConfiguration *
> +PipelineHandlerRPi::generateConfiguration(Camera *camera,
> +					  const StreamRoles &roles)
> +{
> +	CameraConfiguration *config = new RPiCameraConfiguration();
> +
> +	if (roles.empty())
> +		return config;
> +
> +	StreamConfiguration cfg{};
> +	cfg.pixelFormat = V4L2_PIX_FMT_YUYV;
> +	cfg.size = { 1920, 1080 }; // data->sensor_->resolution();
> +	cfg.bufferCount = 4;
> +
> +	LOG(RPI, Debug) << "Config default as " << cfg.toString();
> +
> +	config->addConfiguration(cfg);
> +
> +	config->validate();
> +
> +	return config;
> +}
> +
> +int PipelineHandlerRPi::configure(Camera *camera,
> +				  CameraConfiguration *config)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	StreamConfiguration &cfg = config->at(0);
> +	int ret;
> +
> +	Size sensorSize = { 1920, 1080 };
> +	Size outputSize = { 1920, 1088 };
> +
> +	V4L2DeviceFormat format = {};
> +	format.size = sensorSize;
> +	format.fourcc = V4L2_PIX_FMT_SRGGB10P;
> +
> +	LOG(RPI, Debug) << "Setting format to " << format.toString();
> +
> +	ret = data->unicam_->setFormat(&format);
> +	if (ret)
> +		return ret;
> +
> +	if (format.size != sensorSize ||
> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
> +		LOG(RPI, Error)
> +			<< "Failed to set format on Video device: "
> +			<< format.toString();
> +		return -EINVAL;
> +	}
> +
> +	format.size = outputSize;
> +
> +	ret = data->isp_->output()->setFormat(&format);
> +
> +	if (format.size != outputSize ||
> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
> +		LOG(RPI, Error) << "Failed to set format on ISP output device: "
> +				<< format.toString();
> +		return -EINVAL;
> +	}
> +
> +	/* Configure the ISP based to generate the requested size and format. */
> +	format.size = cfg.size;
> +	format.fourcc = cfg.pixelFormat;
> +
> +	ret = data->isp_->capture()->setFormat(&format);
> +
> +	if (format.size != cfg.size ||
> +	    format.fourcc != cfg.pixelFormat) {
> +		LOG(RPI, Error)
> +			<< "Failed to set format on ISP capture device: "
> +			<< format.toString();
> +		return -EINVAL;
> +	}
> +
> +	cfg.setStream(&data->stream_);
> +
> +	return 0;
> +}
> +
> +int PipelineHandlerRPi::allocateBuffers(Camera *camera,
> +					const std::set<Stream *> &streams)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	Stream *stream = *streams.begin();
> +	const StreamConfiguration &cfg = stream->configuration();
> +	int ret;
> +
> +	LOG(RPI, Debug) << "Requesting " << cfg.bufferCount << " buffers";
> +
> +	/*
> +	 * Buffers are allocated on the camera, and the capture pad of the ISP
> +	 *      unicam -> isp.output -> isp.capture -> Application
> +	 */
> +
> +	/* Create a new intermediate buffer pool */
> +	data->bayerBuffers_.createBuffers(cfg.bufferCount);
> +
> +	/* Tie the unicam video buffers to the intermediate pool */
> +	ret = data->unicam_->exportBuffers(&data->bayerBuffers_);
> +	if (ret)
> +		return ret;
> +
> +	ret = data->isp_->output()->importBuffers(&data->bayerBuffers_);
> +	if (ret)
> +		return ret;
> +
> +	/* Tie the stream buffers to the capture device of the ISP */
> +	if (stream->memoryType() == InternalMemory)
> +		ret = data->isp_->capture()->exportBuffers(&stream->bufferPool());
> +	else
> +		ret = data->isp_->capture()->importBuffers(&stream->bufferPool());
> +
> +	return ret;
> +}
> +
> +int PipelineHandlerRPi::freeBuffers(Camera *camera,
> +				    const std::set<Stream *> &streams)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	int ret;
> +
> +	ret = data->unicam_->releaseBuffers();
> +	ret = data->isp_->output()->releaseBuffers();
> +	ret = data->isp_->capture()->releaseBuffers();
> +
> +	data->bayerBuffers_.destroyBuffers();
> +
> +	return ret;
> +}
> +
> +int PipelineHandlerRPi::start(Camera *camera)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	int ret;
> +
> +	data->rawBuffers_ = data->unicam_->queueAllBuffers();
> +	if (data->rawBuffers_.empty()) {
> +		LOG(RPI, Debug) << "Failed to queue unicam buffers";
> +		return -EINVAL;
> +	}
> +
> +	LOG(RPI, Warning) << "Using hard-coded exposure/gain defaults";
> +
> +	V4L2ControlList controls;
> +	controls.add(V4L2_CID_EXPOSURE, 1700);
> +	controls.add(V4L2_CID_ANALOGUE_GAIN, 180);
> +	ret = data->unicam_->setControls(&controls);
> +	if (ret) {
> +		LOG(RPI, Error) << "Failed to set controls";
> +		return ret;
> +	}
> +
> +	ret = data->isp_->output()->streamOn();
> +	if (ret)
> +		return ret;
> +
> +	ret = data->isp_->capture()->streamOn();
> +	if (ret)
> +		return ret;
> +
> +	ret = data->unicam_->streamOn();
> +	if (ret)
> +		return ret;
> +
> +	return ret;
> +}
> +
> +void PipelineHandlerRPi::stop(Camera *camera)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +
> +	data->isp_->capture()->streamOff();
> +	data->isp_->output()->streamOff();
> +	data->unicam_->streamOff();
> +
> +	data->rawBuffers_.clear();
> +}
> +
> +int PipelineHandlerRPi::queueRequest(Camera *camera, Request *request)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	Buffer *buffer = request->findBuffer(&data->stream_);
> +	if (!buffer) {
> +		LOG(RPI, Error)
> +			<< "Attempt to queue request with invalid stream";
> +
> +		return -ENOENT;
> +	}
> +
> +	int ret = data->isp_->capture()->queueBuffer(buffer);
> +	if (ret < 0)
> +		return ret;
> +
> +	PipelineHandler::queueRequest(camera, request);
> +
> +	return 0;
> +}
> +
> +bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
> +{
> +	DeviceMatch unicam("unicam");
> +	DeviceMatch codec("bcm2835-codec");
> +
> +	/* We explicitly need the ISP device from the MMAL codec driver. */
> +	codec.add("bcm2835-codec-isp");
> +
> +	unicam_ = enumerator->search(unicam);
> +	if (!unicam_)
> +		return false;
> +
> +	codec_ = enumerator->search(codec);
> +	if (!codec_)
> +		return false;
> +
> +	unicam_->acquire();
> +	codec_->acquire();
> +
> +	std::unique_ptr<RPiCameraData> data = utils::make_unique<RPiCameraData>(this);
> +
> +	/* Locate and open the unicam video node. */
> +	data->unicam_ = new V4L2VideoDevice(unicam_->getEntityByName("unicam"));
> +	if (data->unicam_->open())
> +		return false;
> +
> +	/* Locate the ISP M2M node */
> +	MediaEntity *isp = codec_->getEntityByName("bcm2835-codec-isp");
> +	if (!isp)
> +		return false;
> +
> +	data->isp_ = new V4L2M2MDevice(isp->deviceNode());
> +	if (data->isp_->status()) {
> +		LOG(RPI, Error) << "Could not create the ISP device";
> +		return false;
> +	}
> +
> +	data->unicam_->bufferReady.connect(data.get(), &RPiCameraData::sensorReady);
> +	data->isp_->output()->bufferReady.connect(data.get(), &RPiCameraData::ispOutputReady);
> +	data->isp_->capture()->bufferReady.connect(data.get(), &RPiCameraData::ispCaptureReady);
> +
> +	/* Identify the sensor */
> +	for (MediaEntity *entity : unicam_->entities()) {
> +		if (entity->function() == MEDIA_ENT_F_CAM_SENSOR) {
> +			data->sensor_ = new CameraSensor(entity);
> +			break;
> +		}
> +	}
> +
> +	if (!data->sensor_)
> +		return false;
> +
> +	int ret = data->sensor_->init();
> +	if (ret)
> +		return false;
> +
> +	/* Create and register the camera. */
> +	std::set<Stream *> streams{ &data->stream_ };
> +	std::shared_ptr<Camera> camera =
> +		Camera::create(this, data->sensor_->entity()->name(), streams);
> +	registerCamera(std::move(camera), std::move(data));
> +
> +	return true;
> +}
> +
> +void RPiCameraData::sensorReady(Buffer *buffer)
> +{
> +	/* Deliver the frame from the sensor to the ISP */
> +	isp_->output()->queueBuffer(buffer);
> +}
> +
> +void RPiCameraData::ispOutputReady(Buffer *buffer)
> +{
> +	/* Return a completed buffer from the ISP back to the sensor */
> +	unicam_->queueBuffer(buffer);
> +}
> +
> +void RPiCameraData::ispCaptureReady(Buffer *buffer)
> +{
> +	Request *request = buffer->request();
> +
> +	pipe_->completeBuffer(camera_, request, buffer);
> +	pipe_->completeRequest(camera_, request);
> +}
> +
> +REGISTER_PIPELINE_HANDLER(PipelineHandlerRPi);
> +
> +} /* namespace libcamera */
>
Laurent Pinchart Aug. 8, 2019, 9:51 p.m. UTC | #2
Hi Kieran,

Thank you for the patch.

On Thu, Aug 08, 2019 at 04:12:21PM +0100, Kieran Bingham wrote:
> Utilise the CameraSensor class and construct a pipeline for a single
> sensor on the Unicam, routed through the V4L2 Codec ISP interface.
> 
> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>

A pipeline handler for the RPi, very nice :-)

> ---
>  src/libcamera/pipeline/meson.build     |   1 +
>  src/libcamera/pipeline/raspberrypi.cpp | 425 +++++++++++++++++++++++++
>  2 files changed, 426 insertions(+)
>  create mode 100644 src/libcamera/pipeline/raspberrypi.cpp
> 
> diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
> index 0d466225a72e..7ed7b26f3033 100644
> --- a/src/libcamera/pipeline/meson.build
> +++ b/src/libcamera/pipeline/meson.build
> @@ -1,4 +1,5 @@
>  libcamera_sources += files([
> +    'raspberrypi.cpp',

I would expect the same level of complexity as the IPU3 and RkISP1
pipeline handlers, should this get its own directory ?

>      'uvcvideo.cpp',
>      'vimc.cpp',
>  ])
> diff --git a/src/libcamera/pipeline/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi.cpp
> new file mode 100644
> index 000000000000..4c4c3dea0113
> --- /dev/null
> +++ b/src/libcamera/pipeline/raspberrypi.cpp
> @@ -0,0 +1,425 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2019, Google Inc.
> + *
> + * raspberrypi.cpp - Pipeline handler for raspberrypi devices

s/raspberrypi devices/Raspberry Pi devices/ ?

Should we name it Raspberry Pi, or based on the SoC ? It could be used
on other systems using the same SoC (or family of SoCs).

> + */
> +
> +#include <libcamera/camera.h>
> +#include <libcamera/request.h>
> +#include <libcamera/stream.h>
> +
> +#include "camera_sensor.h"
> +#include "device_enumerator.h"
> +#include "log.h"
> +#include "media_device.h"
> +#include "pipeline_handler.h"
> +#include "utils.h"
> +#include "v4l2_controls.h"
> +#include "v4l2_videodevice.h"
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(RPI)
> +
> +class RPiCameraData : public CameraData
> +{
> +public:
> +	RPiCameraData(PipelineHandler *pipe)
> +		: CameraData(pipe), sensor_(nullptr), unicam_(nullptr),
> +		  isp_(nullptr)
> +	{
> +	}
> +
> +	~RPiCameraData()
> +	{
> +		bayerBuffers_.destroyBuffers();

Shouldn't you also delete sensor_ ?

> +		delete unicam_;
> +		delete isp_;
> +	}
> +
> +	void sensorReady(Buffer *buffer);
> +	void ispOutputReady(Buffer *buffer);
> +	void ispCaptureReady(Buffer *buffer);
> +
> +	CameraSensor *sensor_;
> +	V4L2VideoDevice *unicam_;
> +	V4L2M2MDevice *isp_;
> +	Stream stream_;
> +
> +	BufferPool bayerBuffers_;
> +	std::vector<std::unique_ptr<Buffer>> rawBuffers_;
> +};
> +
> +class RPiCameraConfiguration : public CameraConfiguration
> +{
> +public:
> +	RPiCameraConfiguration();
> +
> +	Status validate() override;
> +};
> +
> +class PipelineHandlerRPi : public PipelineHandler
> +{
> +public:
> +	PipelineHandlerRPi(CameraManager *manager);
> +	~PipelineHandlerRPi();
> +
> +	CameraConfiguration *
> +	generateConfiguration(Camera *camera,
> +			      const StreamRoles &roles) override;
> +	int configure(Camera *camera,
> +		      CameraConfiguration *config) override;
> +
> +	int allocateBuffers(Camera *camera,
> +			    const std::set<Stream *> &streams) override;
> +	int freeBuffers(Camera *camera,
> +			const std::set<Stream *> &streams) override;
> +
> +	int start(Camera *camera) override;
> +	void stop(Camera *camera) override;
> +
> +	int queueRequest(Camera *camera, Request *request) override;
> +
> +	bool match(DeviceEnumerator *enumerator) override;
> +
> +private:
> +	RPiCameraData *cameraData(const Camera *camera)
> +	{
> +		return static_cast<RPiCameraData *>(
> +			PipelineHandler::cameraData(camera));
> +	}
> +
> +	std::shared_ptr<MediaDevice> unicam_;
> +	std::shared_ptr<MediaDevice> codec_;
> +};
> +
> +RPiCameraConfiguration::RPiCameraConfiguration()
> +	: CameraConfiguration()
> +{
> +}
> +
> +CameraConfiguration::Status RPiCameraConfiguration::validate()
> +{
> +	Status status = Valid;
> +
> +	if (config_.empty())
> +		return Invalid;
> +
> +	/* Todo: Experiment with increased stream support through the ISP. */

s/Todo:/\todo/

> +	if (config_.size() > 1) {
> +		config_.resize(1);
> +		status = Adjusted;
> +	}
> +
> +	StreamConfiguration &cfg = config_[0];
> +
> +	/* Todo: restrict to hardware capabilities. */

I think this one should be addressed before merging the code.
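
For instance, once the real limits are known, validate() could clamp the
requested size and flag the adjustment with something along these lines
(illustrative sketch only, using 1920x1080 as a placeholder for the
current ISP limit):

	Size maxSize = { 1920, 1080 };
	Size clamped = { std::min(cfg.size.width, maxSize.width),
			 std::min(cfg.size.height, maxSize.height) };
	if (cfg.size != clamped) {
		cfg.size = clamped;
		status = Adjusted;
	}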

> +
> +	cfg.bufferCount = 4;
> +
> +	return status;
> +}
> +
> +PipelineHandlerRPi::PipelineHandlerRPi(CameraManager *manager)
> +	: PipelineHandler(manager), unicam_(nullptr), codec_(nullptr)
> +{
> +}
> +
> +PipelineHandlerRPi::~PipelineHandlerRPi()
> +{
> +	if (unicam_)
> +		unicam_->release();
> +
> +	if (codec_)
> +		codec_->release();
> +}
> +
> +CameraConfiguration *
> +PipelineHandlerRPi::generateConfiguration(Camera *camera,
> +					  const StreamRoles &roles)
> +{
> +	CameraConfiguration *config = new RPiCameraConfiguration();
> +
> +	if (roles.empty())
> +		return config;
> +
> +	StreamConfiguration cfg{};
> +	cfg.pixelFormat = V4L2_PIX_FMT_YUYV;
> +	cfg.size = { 1920, 1080 }; // data->sensor_->resolution();

Let's remove commented-out code.

What prevents from using the sensor resolution, and how is 1080p
selected as the default resolution ?

> +	cfg.bufferCount = 4;
> +
> +	LOG(RPI, Debug) << "Config default as " << cfg.toString();

I think the configuration is printed in the caller already.

> +
> +	config->addConfiguration(cfg);
> +
> +	config->validate();
> +
> +	return config;
> +}
> +
> +int PipelineHandlerRPi::configure(Camera *camera,
> +				  CameraConfiguration *config)

This holds on a single line.

> +{
> +	RPiCameraData *data = cameraData(camera);
> +	StreamConfiguration &cfg = config->at(0);
> +	int ret;
> +
> +	Size sensorSize = { 1920, 1080 };
> +	Size outputSize = { 1920, 1088 };
> +
> +	V4L2DeviceFormat format = {};
> +	format.size = sensorSize;
> +	format.fourcc = V4L2_PIX_FMT_SRGGB10P;

This all seems to lack genericity :-) I think that at least the format
should support different Bayer patterns and bpp values.

Don't you need to set the format on the sensor subdev ?
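
Something along these lines in configure(), perhaps (an untested sketch
that assumes CameraSensor::setFormat() and hard-codes the 10-bit RGGB
media bus code to match the video node format used below):

	V4L2SubdeviceFormat sensorFormat = {};
	sensorFormat.mbus_code = MEDIA_BUS_FMT_SRGGB10_1X10;
	sensorFormat.size = sensorSize;

	ret = data->sensor_->setFormat(&sensorFormat);
	if (ret)
		return ret;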

> +
> +	LOG(RPI, Debug) << "Setting format to " << format.toString();
> +
> +	ret = data->unicam_->setFormat(&format);
> +	if (ret)
> +		return ret;
> +
> +	if (format.size != sensorSize ||
> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
> +		LOG(RPI, Error)
> +			<< "Failed to set format on Video device: "
> +			<< format.toString();
> +		return -EINVAL;
> +	}
> +
> +	format.size = outputSize;
> +
> +	ret = data->isp_->output()->setFormat(&format);
> +
> +	if (format.size != outputSize ||
> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
> +		LOG(RPI, Error) << "Failed to set format on ISP output device: "
> +				<< format.toString();
> +		return -EINVAL;
> +	}
> +
> +	/* Configure the ISP based to generate the requested size and format. */

Are you missing something after "based" ?

> +	format.size = cfg.size;
> +	format.fourcc = cfg.pixelFormat;
> +
> +	ret = data->isp_->capture()->setFormat(&format);
> +
> +	if (format.size != cfg.size ||
> +	    format.fourcc != cfg.pixelFormat) {
> +		LOG(RPI, Error)
> +			<< "Failed to set format on ISP capture device: "
> +			<< format.toString();
> +		return -EINVAL;
> +	}
> +
> +	cfg.setStream(&data->stream_);
> +
> +	return 0;
> +}
> +
> +int PipelineHandlerRPi::allocateBuffers(Camera *camera,
> +					const std::set<Stream *> &streams)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	Stream *stream = *streams.begin();
> +	const StreamConfiguration &cfg = stream->configuration();
> +	int ret;
> +
> +	LOG(RPI, Debug) << "Requesting " << cfg.bufferCount << " buffers";

I'd drop this message, or move it to Camera::allocateBuffers().

> +
> +	/*
> +	 * Buffers are allocated on the camera, and the capture pad of the ISP
> +	 *      unicam -> isp.output -> isp.capture -> Application
> +	 */
> +
> +	/* Create a new intermediate buffer pool */

s/pool/pool./ (same comment for all other comments in this file that
don't end with a period)

> +	data->bayerBuffers_.createBuffers(cfg.bufferCount);
> +
> +	/* Tie the unicam video buffers to the intermediate pool */
> +	ret = data->unicam_->exportBuffers(&data->bayerBuffers_);
> +	if (ret)
> +		return ret;
> +
> +	ret = data->isp_->output()->importBuffers(&data->bayerBuffers_);
> +	if (ret)
> +		return ret;
> +
> +	/* Tie the stream buffers to the capture device of the ISP */
> +	if (stream->memoryType() == InternalMemory)
> +		ret = data->isp_->capture()->exportBuffers(&stream->bufferPool());
> +	else
> +		ret = data->isp_->capture()->importBuffers(&stream->bufferPool());
> +
> +	return ret;
> +}
> +
> +int PipelineHandlerRPi::freeBuffers(Camera *camera,
> +				    const std::set<Stream *> &streams)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	int ret;
> +
> +	ret = data->unicam_->releaseBuffers();
> +	ret = data->isp_->output()->releaseBuffers();
> +	ret = data->isp_->capture()->releaseBuffers();

You're losing the first two error values.

> +
> +	data->bayerBuffers_.destroyBuffers();
> +
> +	return ret;
> +}
> +
> +int PipelineHandlerRPi::start(Camera *camera)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	int ret;
> +
> +	data->rawBuffers_ = data->unicam_->queueAllBuffers();
> +	if (data->rawBuffers_.empty()) {
> +		LOG(RPI, Debug) << "Failed to queue unicam buffers";
> +		return -EINVAL;
> +	}
> +
> +	LOG(RPI, Warning) << "Using hard-coded exposure/gain defaults";
> +
> +	V4L2ControlList controls;
> +	controls.add(V4L2_CID_EXPOSURE, 1700);
> +	controls.add(V4L2_CID_ANALOGUE_GAIN, 180);
> +	ret = data->unicam_->setControls(&controls);
> +	if (ret) {
> +		LOG(RPI, Error) << "Failed to set controls";
> +		return ret;
> +	}
> +
> +	ret = data->isp_->output()->streamOn();
> +	if (ret)
> +		return ret;
> +
> +	ret = data->isp_->capture()->streamOn();
> +	if (ret)
> +		return ret;
> +
> +	ret = data->unicam_->streamOn();
> +	if (ret)
> +		return ret;

Shouldn't you stop streaming on the successfully started streams when an
error happens ?
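
Something along these lines, for instance (a rough sketch of the error
path only, stopping the devices already started above):

	ret = data->isp_->capture()->streamOn();
	if (ret) {
		data->isp_->output()->streamOff();
		return ret;
	}

	ret = data->unicam_->streamOn();
	if (ret) {
		data->isp_->capture()->streamOff();
		data->isp_->output()->streamOff();
		return ret;
	}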

> +
> +	return ret;

	return 0;

> +}
> +
> +void PipelineHandlerRPi::stop(Camera *camera)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +
> +	data->isp_->capture()->streamOff();
> +	data->isp_->output()->streamOff();
> +	data->unicam_->streamOff();
> +
> +	data->rawBuffers_.clear();
> +}
> +
> +int PipelineHandlerRPi::queueRequest(Camera *camera, Request *request)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	Buffer *buffer = request->findBuffer(&data->stream_);
> +	if (!buffer) {
> +		LOG(RPI, Error)
> +			<< "Attempt to queue request with invalid stream";
> +
> +		return -ENOENT;

Can this happen ?

> +	}
> +
> +	int ret = data->isp_->capture()->queueBuffer(buffer);
> +	if (ret < 0)
> +		return ret;
> +
> +	PipelineHandler::queueRequest(camera, request);
> +
> +	return 0;
> +}
> +
> +bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
> +{
> +	DeviceMatch unicam("unicam");
> +	DeviceMatch codec("bcm2835-codec");
> +
> +	/* We explicitly need the ISP device from the MMAL codec driver. */
> +	codec.add("bcm2835-codec-isp");

Is there any subdev in the unicam device that we could match on ?

> +
> +	unicam_ = enumerator->search(unicam);
> +	if (!unicam_)
> +		return false;
> +
> +	codec_ = enumerator->search(codec);
> +	if (!codec_)
> +		return false;
> +
> +	unicam_->acquire();
> +	codec_->acquire();
> +
> +	std::unique_ptr<RPiCameraData> data = utils::make_unique<RPiCameraData>(this);
> +
> +	/* Locate and open the unicam video node. */
> +	data->unicam_ = new V4L2VideoDevice(unicam_->getEntityByName("unicam"));
> +	if (data->unicam_->open())
> +		return false;
> +
> +	/* Locate the ISP M2M node */
> +	MediaEntity *isp = codec_->getEntityByName("bcm2835-codec-isp");
> +	if (!isp)
> +		return false;
> +
> +	data->isp_ = new V4L2M2MDevice(isp->deviceNode());
> +	if (data->isp_->status()) {
> +		LOG(RPI, Error) << "Could not create the ISP device";
> +		return false;
> +	}
> +
> +	data->unicam_->bufferReady.connect(data.get(), &RPiCameraData::sensorReady);
> +	data->isp_->output()->bufferReady.connect(data.get(), &RPiCameraData::ispOutputReady);
> +	data->isp_->capture()->bufferReady.connect(data.get(), &RPiCameraData::ispCaptureReady);
> +
> +	/* Identify the sensor */
> +	for (MediaEntity *entity : unicam_->entities()) {
> +		if (entity->function() == MEDIA_ENT_F_CAM_SENSOR) {
> +			data->sensor_ = new CameraSensor(entity);
> +			break;
> +		}
> +	}
> +
> +	if (!data->sensor_)
> +		return false;
> +
> +	int ret = data->sensor_->init();
> +	if (ret)
> +		return false;

No need for an intermediate variable, you can write

	if (data->sensor_->init())
		return false;

> +
> +	/* Create and register the camera. */
> +	std::set<Stream *> streams{ &data->stream_ };
> +	std::shared_ptr<Camera> camera =
> +		Camera::create(this, data->sensor_->entity()->name(), streams);
> +	registerCamera(std::move(camera), std::move(data));
> +
> +	return true;
> +}
> +
> +void RPiCameraData::sensorReady(Buffer *buffer)
> +{
> +	/* Deliver the frame from the sensor to the ISP */

You should skip this when buffer->status() == Buffer::BufferCancelled
(see the IPU3 pipeline handler).
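
That is, an early return at the top of the handler:

	if (buffer->status() == Buffer::BufferCancelled)
		return;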

> +	isp_->output()->queueBuffer(buffer);
> +}
> +
> +void RPiCameraData::ispOutputReady(Buffer *buffer)
> +{
> +	/* Return a completed buffer from the ISP back to the sensor */

Same comment here.

> +	unicam_->queueBuffer(buffer);
> +}
> +
> +void RPiCameraData::ispCaptureReady(Buffer *buffer)
> +{
> +	Request *request = buffer->request();
> +
> +	pipe_->completeBuffer(camera_, request, buffer);
> +	pipe_->completeRequest(camera_, request);
> +}
> +
> +REGISTER_PIPELINE_HANDLER(PipelineHandlerRPi);
> +
> +} /* namespace libcamera */
Kieran Bingham Aug. 9, 2019, 11:19 a.m. UTC | #3
Hi Laurent,

On 08/08/2019 22:51, Laurent Pinchart wrote:
> Hi Kieran,
> 
> Thank you for the patch.
> 
> On Thu, Aug 08, 2019 at 04:12:21PM +0100, Kieran Bingham wrote:
>> Utilise the CameraSensor class and construct a pipeline for a single
>> sensor on the Unicam, routed through the V4L2 Codec ISP interface.
>>
>> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
> 
> A pipeline handler for the RPi, very nice :-)

A "Proof of Concept" pipeline handler :-) - It works, but it needs more
development and requires out-of-tree drivers for the kernel, plus
additional patches on top of those out-of-tree drivers :S

We'll get there :-D

Thanks for the review comments. I've pretty much taken most of them in.

>> ---
>>  src/libcamera/pipeline/meson.build     |   1 +
>>  src/libcamera/pipeline/raspberrypi.cpp | 425 +++++++++++++++++++++++++
>>  2 files changed, 426 insertions(+)
>>  create mode 100644 src/libcamera/pipeline/raspberrypi.cpp
>>
>> diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
>> index 0d466225a72e..7ed7b26f3033 100644
>> --- a/src/libcamera/pipeline/meson.build
>> +++ b/src/libcamera/pipeline/meson.build
>> @@ -1,4 +1,5 @@
>>  libcamera_sources += files([
>> +    'raspberrypi.cpp',
> 
> I would expect the same level of complexity as the IPU3 and RkISP1
> pipeline handlers, should this get its own directory ?

As yet, I've seen only a single file each for the IPU3 and RkISP1.

What other files are expected? The IPA code will go in a separate
directory, right? So I'm not yet sure what would be broken out of the
pipeline handlers to need their own directory.

(Except the IPU3 - That looks like it could be split to have an
ipu3.cpp, imgu.cpp, cio2.cpp.)



> 
>>      'uvcvideo.cpp',
>>      'vimc.cpp',
>>  ])
>> diff --git a/src/libcamera/pipeline/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi.cpp
>> new file mode 100644
>> index 000000000000..4c4c3dea0113
>> --- /dev/null
>> +++ b/src/libcamera/pipeline/raspberrypi.cpp
>> @@ -0,0 +1,425 @@
>> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
>> +/*
>> + * Copyright (C) 2019, Google Inc.
>> + *
>> + * raspberrypi.cpp - Pipeline handler for raspberrypi devices
> 
> s/raspberrypi devices/Raspberry Pi devices/ ?
> 
> Should we name it Raspberry Pi, or based on the SoC ? It could be used
> on other systems using the same SoC (or family of SoCs).

I expect this pipeline handler to function on all of the Raspberry Pis
with a camera interface. This covers multiple SoCs.

So let's stick with RaspberryPi as the name, I think.


>> + */
>> +
>> +#include <libcamera/camera.h>
>> +#include <libcamera/request.h>
>> +#include <libcamera/stream.h>
>> +
>> +#include "camera_sensor.h"
>> +#include "device_enumerator.h"
>> +#include "log.h"
>> +#include "media_device.h"
>> +#include "pipeline_handler.h"
>> +#include "utils.h"
>> +#include "v4l2_controls.h"
>> +#include "v4l2_videodevice.h"
>> +
>> +namespace libcamera {
>> +
>> +LOG_DEFINE_CATEGORY(RPI)
>> +
>> +class RPiCameraData : public CameraData
>> +{
>> +public:
>> +	RPiCameraData(PipelineHandler *pipe)
>> +		: CameraData(pipe), sensor_(nullptr), unicam_(nullptr),
>> +		  isp_(nullptr)
>> +	{
>> +	}
>> +
>> +	~RPiCameraData()
>> +	{
>> +		bayerBuffers_.destroyBuffers();
> 
> Shouldn't you also delete sensor_ ?

Yup.


> 
>> +		delete unicam_;
>> +		delete isp_;
>> +	}
>> +
>> +	void sensorReady(Buffer *buffer);
>> +	void ispOutputReady(Buffer *buffer);
>> +	void ispCaptureReady(Buffer *buffer);
>> +
>> +	CameraSensor *sensor_;
>> +	V4L2VideoDevice *unicam_;
>> +	V4L2M2MDevice *isp_;
>> +	Stream stream_;
>> +
>> +	BufferPool bayerBuffers_;
>> +	std::vector<std::unique_ptr<Buffer>> rawBuffers_;
>> +};
>> +
>> +class RPiCameraConfiguration : public CameraConfiguration
>> +{
>> +public:
>> +	RPiCameraConfiguration();
>> +
>> +	Status validate() override;
>> +};
>> +
>> +class PipelineHandlerRPi : public PipelineHandler
>> +{
>> +public:
>> +	PipelineHandlerRPi(CameraManager *manager);
>> +	~PipelineHandlerRPi();
>> +
>> +	CameraConfiguration *
>> +	generateConfiguration(Camera *camera,
>> +			      const StreamRoles &roles) override;
>> +	int configure(Camera *camera,
>> +		      CameraConfiguration *config) override;
>> +
>> +	int allocateBuffers(Camera *camera,
>> +			    const std::set<Stream *> &streams) override;
>> +	int freeBuffers(Camera *camera,
>> +			const std::set<Stream *> &streams) override;
>> +
>> +	int start(Camera *camera) override;
>> +	void stop(Camera *camera) override;
>> +
>> +	int queueRequest(Camera *camera, Request *request) override;
>> +
>> +	bool match(DeviceEnumerator *enumerator) override;
>> +
>> +private:
>> +	RPiCameraData *cameraData(const Camera *camera)
>> +	{
>> +		return static_cast<RPiCameraData *>(
>> +			PipelineHandler::cameraData(camera));
>> +	}
>> +
>> +	std::shared_ptr<MediaDevice> unicam_;
>> +	std::shared_ptr<MediaDevice> codec_;
>> +};
>> +
>> +RPiCameraConfiguration::RPiCameraConfiguration()
>> +	: CameraConfiguration()
>> +{
>> +}
>> +
>> +CameraConfiguration::Status RPiCameraConfiguration::validate()
>> +{
>> +	Status status = Valid;
>> +
>> +	if (config_.empty())
>> +		return Invalid;
>> +
>> +	/* Todo: Experiment with increased stream support through the ISP. */
> 
> s/Todo:/\todo/
> 
>> +	if (config_.size() > 1) {
>> +		config_.resize(1);
>> +		status = Adjusted;
>> +	}
>> +
>> +	StreamConfiguration &cfg = config_[0];
>> +
>> +	/* Todo: restrict to hardware capabilities. */
> 
> I think this one should be addressed before merging the code.


Yes, needs more development.



> 
>> +
>> +	cfg.bufferCount = 4;
>> +
>> +	return status;
>> +}
>> +
>> +PipelineHandlerRPi::PipelineHandlerRPi(CameraManager *manager)
>> +	: PipelineHandler(manager), unicam_(nullptr), codec_(nullptr)
>> +{
>> +}
>> +
>> +PipelineHandlerRPi::~PipelineHandlerRPi()
>> +{
>> +	if (unicam_)
>> +		unicam_->release();
>> +
>> +	if (codec_)
>> +		codec_->release();
>> +}
>> +
>> +CameraConfiguration *
>> +PipelineHandlerRPi::generateConfiguration(Camera *camera,
>> +					  const StreamRoles &roles)
>> +{
>> +	CameraConfiguration *config = new RPiCameraConfiguration();
>> +
>> +	if (roles.empty())
>> +		return config;
>> +
>> +	StreamConfiguration cfg{};
>> +	cfg.pixelFormat = V4L2_PIX_FMT_YUYV;
>> +	cfg.size = { 1920, 1080 }; // data->sensor_->resolution();
> 
> Let's remove commented-out code.
> 
> What prevents from using the sensor resolution, and how is 1080p
> selected as the default resolution ?

The Raspberry Pi v2 camera defaults to a 3280x2464 output.

The ISP currently has an (incorrect) artificial limitation of 1080p.

We can work through this when we get updated ISP support.


>> +	cfg.bufferCount = 4;
>> +
>> +	LOG(RPI, Debug) << "Config default as " << cfg.toString();
> 
> I think the configuration is printed in the caller already.
> 

Removed.

>> +
>> +	config->addConfiguration(cfg);
>> +
>> +	config->validate();
>> +
>> +	return config;
>> +}
>> +
>> +int PipelineHandlerRPi::configure(Camera *camera,
>> +				  CameraConfiguration *config)
> 
> This holds on a single line.
> 
>> +{
>> +	RPiCameraData *data = cameraData(camera);
>> +	StreamConfiguration &cfg = config->at(0);
>> +	int ret;
>> +
>> +	Size sensorSize = { 1920, 1080 };
>> +	Size outputSize = { 1920, 1088 };
>> +
>> +	V4L2DeviceFormat format = {};
>> +	format.size = sensorSize;
>> +	format.fourcc = V4L2_PIX_FMT_SRGGB10P;
> 
> This all seems to lack genericity :-) I think that at least the format
> should support different Bayer patterns and bpp values.
> 
> Don't you need to set the format on the sensor subdev ?
> 
>> +
>> +	LOG(RPI, Debug) << "Setting format to " << format.toString();
>> +
>> +	ret = data->unicam_->setFormat(&format);
>> +	if (ret)
>> +		return ret;
>> +
>> +	if (format.size != sensorSize ||
>> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
>> +		LOG(RPI, Error)
>> +			<< "Failed to set format on Video device: "
>> +			<< format.toString();
>> +		return -EINVAL;
>> +	}
>> +
>> +	format.size = outputSize;
>> +
>> +	ret = data->isp_->output()->setFormat(&format);
>> +
>> +	if (format.size != outputSize ||
>> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
>> +		LOG(RPI, Error) << "Failed to set format on ISP output device: "
>> +				<< format.toString();
>> +		return -EINVAL;
>> +	}
>> +
>> +	/* Configure the ISP based to generate the requested size and format. */
> 
> Are you missing something after "based" ?

s/based//

Or it could simply be "Configure the ISP" ... but we have limited
configuration so far...


> 
>> +	format.size = cfg.size;
>> +	format.fourcc = cfg.pixelFormat;
>> +
>> +	ret = data->isp_->capture()->setFormat(&format);
>> +
>> +	if (format.size != cfg.size ||
>> +	    format.fourcc != cfg.pixelFormat) {
>> +		LOG(RPI, Error)
>> +			<< "Failed to set format on ISP capture device: "
>> +			<< format.toString();
>> +		return -EINVAL;
>> +	}
>> +
>> +	cfg.setStream(&data->stream_);
>> +
>> +	return 0;
>> +}
>> +
>> +int PipelineHandlerRPi::allocateBuffers(Camera *camera,
>> +					const std::set<Stream *> &streams)
>> +{
>> +	RPiCameraData *data = cameraData(camera);
>> +	Stream *stream = *streams.begin();
>> +	const StreamConfiguration &cfg = stream->configuration();
>> +	int ret;
>> +
>> +	LOG(RPI, Debug) << "Requesting " << cfg.bufferCount << " buffers";
> 
> I'd drop this message, or move it to Camera::allocateBuffers().

It's stream dependent ... so I'll just drop it.


> 
>> +
>> +	/*
>> +	 * Buffers are allocated on the camera, and the capture pad of the ISP
>> +	 *      unicam -> isp.output -> isp.capture -> Application
>> +	 */
>> +
>> +	/* Create a new intermediate buffer pool */
> 
> s/pool/pool./ (same comment for all other comments in this file that
> don't end with a period)

Do you think you can make a rule for this in checkstyle.py?


>> +	data->bayerBuffers_.createBuffers(cfg.bufferCount);
>> +
>> +	/* Tie the unicam video buffers to the intermediate pool */
>> +	ret = data->unicam_->exportBuffers(&data->bayerBuffers_);
>> +	if (ret)
>> +		return ret;
>> +
>> +	ret = data->isp_->output()->importBuffers(&data->bayerBuffers_);
>> +	if (ret)
>> +		return ret;
>> +
>> +	/* Tie the stream buffers to the capture device of the ISP */
>> +	if (stream->memoryType() == InternalMemory)
>> +		ret = data->isp_->capture()->exportBuffers(&stream->bufferPool());
>> +	else
>> +		ret = data->isp_->capture()->importBuffers(&stream->bufferPool());
>> +
>> +	return ret;
>> +}
>> +
>> +int PipelineHandlerRPi::freeBuffers(Camera *camera,
>> +				    const std::set<Stream *> &streams)
>> +{
>> +	RPiCameraData *data = cameraData(camera);
>> +	int ret;
>> +
>> +	ret = data->unicam_->releaseBuffers();
>> +	ret = data->isp_->output()->releaseBuffers();
>> +	ret = data->isp_->capture()->releaseBuffers();
> 
> You're losing the first two error values.

Yup - quick and dirty PoC :D

There are three cleanup operations. If one fails, should the others
still be attempted? Or just give up?

> 
>> +
>> +	data->bayerBuffers_.destroyBuffers();
>> +
>> +	return ret;
>> +}
>> +
>> +int PipelineHandlerRPi::start(Camera *camera)
>> +{
>> +	RPiCameraData *data = cameraData(camera);
>> +	int ret;
>> +
>> +	data->rawBuffers_ = data->unicam_->queueAllBuffers();
>> +	if (data->rawBuffers_.empty()) {
>> +		LOG(RPI, Debug) << "Failed to queue unicam buffers";
>> +		return -EINVAL;
>> +	}
>> +
>> +	LOG(RPI, Warning) << "Using hard-coded exposure/gain defaults";
>> +
>> +	V4L2ControlList controls;
>> +	controls.add(V4L2_CID_EXPOSURE, 1700);
>> +	controls.add(V4L2_CID_ANALOGUE_GAIN, 180);
>> +	ret = data->unicam_->setControls(&controls);
>> +	if (ret) {
>> +		LOG(RPI, Error) << "Failed to set controls";
>> +		return ret;
>> +	}
>> +
>> +	ret = data->isp_->output()->streamOn();
>> +	if (ret)
>> +		return ret;
>> +
>> +	ret = data->isp_->capture()->streamOn();
>> +	if (ret)
>> +		return ret;
>> +
>> +	ret = data->unicam_->streamOn();
>> +	if (ret)
>> +		return ret;
> 
> Shouldn't you stop streaming on the successfully started streams when an
> error happens ?
> 
>> +
>> +	return ret;
> 
> 	return 0;
> 
>> +}
>> +
>> +void PipelineHandlerRPi::stop(Camera *camera)
>> +{
>> +	RPiCameraData *data = cameraData(camera);
>> +
>> +	data->isp_->capture()->streamOff();
>> +	data->isp_->output()->streamOff();
>> +	data->unicam_->streamOff();
>> +
>> +	data->rawBuffers_.clear();
>> +}
>> +
>> +int PipelineHandlerRPi::queueRequest(Camera *camera, Request *request)
>> +{
>> +	RPiCameraData *data = cameraData(camera);
>> +	Buffer *buffer = request->findBuffer(&data->stream_);
>> +	if (!buffer) {
>> +		LOG(RPI, Error)
>> +			<< "Attempt to queue request with invalid stream";
>> +
>> +		return -ENOENT;
> 
> Can this happen ?

I don't know -  I think this was taken from the new buffer handling bits
which have changed quite a lot lately.

This code exactly mirrors the queueRequest in the RkISP1, and Vimc.


> 
>> +	}
>> +
>> +	int ret = data->isp_->capture()->queueBuffer(buffer);
>> +	if (ret < 0)
>> +		return ret;
>> +
>> +	PipelineHandler::queueRequest(camera, request);
>> +
>> +	return 0;
>> +}
>> +
>> +bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
>> +{
>> +	DeviceMatch unicam("unicam");
>> +	DeviceMatch codec("bcm2835-codec");
>> +
>> +	/* We explicitly need the ISP device from the MMAL codec driver. */
>> +	codec.add("bcm2835-codec-isp");
> 
> Is there any subdev in the unicam device that we could match on ?


Possibly one called "unicam" ... should we match "unicam":"unicam" ? or
is "unicam" sufficient...


I'll add:

	/* The video node is also named unicam. */
	unicam.add("unicam");

>> +
>> +	unicam_ = enumerator->search(unicam);
>> +	if (!unicam_)
>> +		return false;
>> +
>> +	codec_ = enumerator->search(codec);
>> +	if (!codec_)
>> +		return false;
>> +
>> +	unicam_->acquire();
>> +	codec_->acquire();
>> +
>> +	std::unique_ptr<RPiCameraData> data = utils::make_unique<RPiCameraData>(this);
>> +
>> +	/* Locate and open the unicam video node. */
>> +	data->unicam_ = new V4L2VideoDevice(unicam_->getEntityByName("unicam"));
>> +	if (data->unicam_->open())
>> +		return false;
>> +
>> +	/* Locate the ISP M2M node */
>> +	MediaEntity *isp = codec_->getEntityByName("bcm2835-codec-isp");
>> +	if (!isp)
>> +		return false;
>> +
>> +	data->isp_ = new V4L2M2MDevice(isp->deviceNode());
>> +	if (data->isp_->status()) {
>> +		LOG(RPI, Error) << "Could not create the ISP device";
>> +		return false;
>> +	}
>> +
>> +	data->unicam_->bufferReady.connect(data.get(), &RPiCameraData::sensorReady);
>> +	data->isp_->output()->bufferReady.connect(data.get(), &RPiCameraData::ispOutputReady);
>> +	data->isp_->capture()->bufferReady.connect(data.get(), &RPiCameraData::ispCaptureReady);
>> +
>> +	/* Identify the sensor */
>> +	for (MediaEntity *entity : unicam_->entities()) {
>> +		if (entity->function() == MEDIA_ENT_F_CAM_SENSOR) {
>> +			data->sensor_ = new CameraSensor(entity);
>> +			break;
>> +		}
>> +	}
>> +
>> +	if (!data->sensor_)
>> +		return false;
>> +
>> +	int ret = data->sensor_->init();
>> +	if (ret)
>> +		return false;
> 
> No need for an intermediate variable, you can write
> 
> 	if (data->sensor_->init())
> 		return false;

done

>> +
>> +	/* Create and register the camera. */
>> +	std::set<Stream *> streams{ &data->stream_ };
>> +	std::shared_ptr<Camera> camera =
>> +		Camera::create(this, data->sensor_->entity()->name(), streams);
>> +	registerCamera(std::move(camera), std::move(data));
>> +
>> +	return true;
>> +}
>> +
>> +void RPiCameraData::sensorReady(Buffer *buffer)
>> +{
>> +	/* Deliver the frame from the sensor to the ISP */
> 
> You should skip this when buffer->status() == Buffer::BufferCancelled
> (see the IPU3 pipeline handler).

Hrm ... I had this ... I must have lost it in a rebase.



> 
>> +	isp_->output()->queueBuffer(buffer);
>> +}
>> +
>> +void RPiCameraData::ispOutputReady(Buffer *buffer)
>> +{
>> +	/* Return a completed buffer from the ISP back to the sensor */
> 
> Same comment here.
> 
>> +	unicam_->queueBuffer(buffer);
>> +}
>> +
>> +void RPiCameraData::ispCaptureReady(Buffer *buffer)
>> +{
>> +	Request *request = buffer->request();
>> +
>> +	pipe_->completeBuffer(camera_, request, buffer);
>> +	pipe_->completeRequest(camera_, request);
>> +}
>> +
>> +REGISTER_PIPELINE_HANDLER(PipelineHandlerRPi);
>> +
>> +} /* namespace libcamera */
>
Laurent Pinchart Aug. 9, 2019, 6:13 p.m. UTC | #4
Hi Kieran,

On Fri, Aug 09, 2019 at 12:19:17PM +0100, Kieran Bingham wrote:
> On 08/08/2019 22:51, Laurent Pinchart wrote:
> > On Thu, Aug 08, 2019 at 04:12:21PM +0100, Kieran Bingham wrote:
> >> Utilise the CameraSensor class and construct a pipeline for a single
> >> sensor on the Unicam, routed through the V4L2 Codec ISP interface.
> >>
> >> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
> > 
> > A pipeline handler for the RPi, very nice :-)
> 
> A "Proof of Concept" pipeline handler :-) - It works, but it needs more
> development and requires out-of-tree drivers for the kernel, plus
> additional patches on top of those out-of-tree drivers :S
> 
> We'll get there :-D
> 
> Thanks for the review comments. I've pretty much taken most of them in.
> 
> >> ---
> >>  src/libcamera/pipeline/meson.build     |   1 +
> >>  src/libcamera/pipeline/raspberrypi.cpp | 425 +++++++++++++++++++++++++
> >>  2 files changed, 426 insertions(+)
> >>  create mode 100644 src/libcamera/pipeline/raspberrypi.cpp
> >>
> >> diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
> >> index 0d466225a72e..7ed7b26f3033 100644
> >> --- a/src/libcamera/pipeline/meson.build
> >> +++ b/src/libcamera/pipeline/meson.build
> >> @@ -1,4 +1,5 @@
> >>  libcamera_sources += files([
> >> +    'raspberrypi.cpp',
> > 
> > I would expect the same level of complexity as the IPU3 and RkISP1
> > pipeline handlers, should this get its own directory ?
> 
> As yet, I've seen only a single file each for the IPU3 and RkISP1.
> 
> What other files are expected? The IPA code will go in a separate
> directory, right? So I'm not yet sure what would be broken out of the
> pipeline handlers to need their own directory.

I'm not sure yet. Let's leave it as-is for now.

> (Except the IPU3 - That looks like it could be split to have an
> ipu3.cpp, imgu.cpp, cio2.cpp.)
> 
> >>      'uvcvideo.cpp',
> >>      'vimc.cpp',
> >>  ])
> >> diff --git a/src/libcamera/pipeline/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi.cpp
> >> new file mode 100644
> >> index 000000000000..4c4c3dea0113
> >> --- /dev/null
> >> +++ b/src/libcamera/pipeline/raspberrypi.cpp
> >> @@ -0,0 +1,425 @@
> >> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> >> +/*
> >> + * Copyright (C) 2019, Google Inc.
> >> + *
> >> + * raspberrypi.cpp - Pipeline handler for raspberrypi devices
> > 
> > s/raspberrypi devices/Raspberry Pi devices/ ?
> > 
> > Should we name it Raspberry Pi, or based on the SoC ? It could be used
> > on other systems using the same SoC (or family of SoCs).
> 
> I expect this pipeline handler to function on all of the Raspberry Pis
> with a camera interface. This covers multiple SoCs.
> 
> So let's stick with RaspberryPi as the name, I think.

OK.

> >> + */
> >> +
> >> +#include <libcamera/camera.h>
> >> +#include <libcamera/request.h>
> >> +#include <libcamera/stream.h>
> >> +
> >> +#include "camera_sensor.h"
> >> +#include "device_enumerator.h"
> >> +#include "log.h"
> >> +#include "media_device.h"
> >> +#include "pipeline_handler.h"
> >> +#include "utils.h"
> >> +#include "v4l2_controls.h"
> >> +#include "v4l2_videodevice.h"
> >> +
> >> +namespace libcamera {
> >> +
> >> +LOG_DEFINE_CATEGORY(RPI)
> >> +
> >> +class RPiCameraData : public CameraData
> >> +{
> >> +public:
> >> +	RPiCameraData(PipelineHandler *pipe)
> >> +		: CameraData(pipe), sensor_(nullptr), unicam_(nullptr),
> >> +		  isp_(nullptr)
> >> +	{
> >> +	}
> >> +
> >> +	~RPiCameraData()
> >> +	{
> >> +		bayerBuffers_.destroyBuffers();
> > 
> > Shouldn't you also delete sensor_ ?
> 
> Yup.
> 
> >> +		delete unicam_;
> >> +		delete isp_;
> >> +	}
> >> +
> >> +	void sensorReady(Buffer *buffer);
> >> +	void ispOutputReady(Buffer *buffer);
> >> +	void ispCaptureReady(Buffer *buffer);
> >> +
> >> +	CameraSensor *sensor_;
> >> +	V4L2VideoDevice *unicam_;
> >> +	V4L2M2MDevice *isp_;
> >> +	Stream stream_;
> >> +
> >> +	BufferPool bayerBuffers_;
> >> +	std::vector<std::unique_ptr<Buffer>> rawBuffers_;
> >> +};
> >> +
> >> +class RPiCameraConfiguration : public CameraConfiguration
> >> +{
> >> +public:
> >> +	RPiCameraConfiguration();
> >> +
> >> +	Status validate() override;
> >> +};
> >> +
> >> +class PipelineHandlerRPi : public PipelineHandler
> >> +{
> >> +public:
> >> +	PipelineHandlerRPi(CameraManager *manager);
> >> +	~PipelineHandlerRPi();
> >> +
> >> +	CameraConfiguration *
> >> +	generateConfiguration(Camera *camera,
> >> +			      const StreamRoles &roles) override;
> >> +	int configure(Camera *camera,
> >> +		      CameraConfiguration *config) override;
> >> +
> >> +	int allocateBuffers(Camera *camera,
> >> +			    const std::set<Stream *> &streams) override;
> >> +	int freeBuffers(Camera *camera,
> >> +			const std::set<Stream *> &streams) override;
> >> +
> >> +	int start(Camera *camera) override;
> >> +	void stop(Camera *camera) override;
> >> +
> >> +	int queueRequest(Camera *camera, Request *request) override;
> >> +
> >> +	bool match(DeviceEnumerator *enumerator) override;
> >> +
> >> +private:
> >> +	RPiCameraData *cameraData(const Camera *camera)
> >> +	{
> >> +		return static_cast<RPiCameraData *>(
> >> +			PipelineHandler::cameraData(camera));
> >> +	}
> >> +
> >> +	std::shared_ptr<MediaDevice> unicam_;
> >> +	std::shared_ptr<MediaDevice> codec_;
> >> +};
> >> +
> >> +RPiCameraConfiguration::RPiCameraConfiguration()
> >> +	: CameraConfiguration()
> >> +{
> >> +}
> >> +
> >> +CameraConfiguration::Status RPiCameraConfiguration::validate()
> >> +{
> >> +	Status status = Valid;
> >> +
> >> +	if (config_.empty())
> >> +		return Invalid;
> >> +
> >> +	/* Todo: Experiment with increased stream support through the ISP. */
> > 
> > s/Todo:/\todo/
> > 
> >> +	if (config_.size() > 1) {
> >> +		config_.resize(1);
> >> +		status = Adjusted;
> >> +	}
> >> +
> >> +	StreamConfiguration &cfg = config_[0];
> >> +
> >> +	/* Todo: restrict to hardware capabilities. */
> > 
> > I think this one should be addressed before merging the code.
> 
> Yes, needs more development.
> 
> >> +
> >> +	cfg.bufferCount = 4;
> >> +
> >> +	return status;
> >> +}
> >> +
> >> +PipelineHandlerRPi::PipelineHandlerRPi(CameraManager *manager)
> >> +	: PipelineHandler(manager), unicam_(nullptr), codec_(nullptr)
> >> +{
> >> +}
> >> +
> >> +PipelineHandlerRPi::~PipelineHandlerRPi()
> >> +{
> >> +	if (unicam_)
> >> +		unicam_->release();
> >> +
> >> +	if (codec_)
> >> +		codec_->release();
> >> +}
> >> +
> >> +CameraConfiguration *
> >> +PipelineHandlerRPi::generateConfiguration(Camera *camera,
> >> +					  const StreamRoles &roles)
> >> +{
> >> +	CameraConfiguration *config = new RPiCameraConfiguration();
> >> +
> >> +	if (roles.empty())
> >> +		return config;
> >> +
> >> +	StreamConfiguration cfg{};
> >> +	cfg.pixelFormat = V4L2_PIX_FMT_YUYV;
> >> +	cfg.size = { 1920, 1080 }; // data->sensor_->resolution();
> > 
> > Let's remove commented-out code.
> > 
> > What prevents from using the sensor resolution, and how is 1080p
> > selected as the default resolution ?
> 
> The Raspberry Pi v2 camera defaults to a 3280x2464 output.
> 
> The ISP currently has an (incorrect) artificial limitation of 1080p.
> 
> We can work through this when we get updated ISP support.
> 
> >> +	cfg.bufferCount = 4;
> >> +
> >> +	LOG(RPI, Debug) << "Config default as " << cfg.toString();
> > 
> > I think the configuration is printed in the caller already.
> 
> Removed.
> 
> >> +
> >> +	config->addConfiguration(cfg);
> >> +
> >> +	config->validate();
> >> +
> >> +	return config;
> >> +}
> >> +
> >> +int PipelineHandlerRPi::configure(Camera *camera,
> >> +				  CameraConfiguration *config)
> > 
> > This holds on a single line.
> > 
> >> +{
> >> +	RPiCameraData *data = cameraData(camera);
> >> +	StreamConfiguration &cfg = config->at(0);
> >> +	int ret;
> >> +
> >> +	Size sensorSize = { 1920, 1080 };
> >> +	Size outputSize = { 1920, 1088 };
> >> +
> >> +	V4L2DeviceFormat format = {};
> >> +	format.size = sensorSize;
> >> +	format.fourcc = V4L2_PIX_FMT_SRGGB10P;
> > 
> > This all seems to lack genericity :-) I think that at least the format
> > should support different Bayer patterns and bpp values.
> > 
> > Don't you need to set the format on the sensor subdev ?
> > 
> >> +
> >> +	LOG(RPI, Debug) << "Setting format to " << format.toString();
> >> +
> >> +	ret = data->unicam_->setFormat(&format);
> >> +	if (ret)
> >> +		return ret;
> >> +
> >> +	if (format.size != sensorSize ||
> >> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
> >> +		LOG(RPI, Error)
> >> +			<< "Failed to set format on Video device: "
> >> +			<< format.toString();
> >> +		return -EINVAL;
> >> +	}
> >> +
> >> +	format.size = outputSize;
> >> +
> >> +	ret = data->isp_->output()->setFormat(&format);
> >> +
> >> +	if (format.size != outputSize ||
> >> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
> >> +		LOG(RPI, Error) << "Failed to set format on ISP output device: "
> >> +				<< format.toString();
> >> +		return -EINVAL;
> >> +	}
> >> +
> >> +	/* Configure the ISP based to generate the requested size and format. */
> > 
> > Are you missing something after "based" ?
> 
> s/based//
> 
> Or it could simply be "Configure the ISP" ... but we have limited
> configuration so far...
> 
> >> +	format.size = cfg.size;
> >> +	format.fourcc = cfg.pixelFormat;
> >> +
> >> +	ret = data->isp_->capture()->setFormat(&format);
> >> +
> >> +	if (format.size != cfg.size ||
> >> +	    format.fourcc != cfg.pixelFormat) {
> >> +		LOG(RPI, Error)
> >> +			<< "Failed to set format on ISP capture device: "
> >> +			<< format.toString();
> >> +		return -EINVAL;
> >> +	}
> >> +
> >> +	cfg.setStream(&data->stream_);
> >> +
> >> +	return 0;
> >> +}
> >> +
> >> +int PipelineHandlerRPi::allocateBuffers(Camera *camera,
> >> +					const std::set<Stream *> &streams)
> >> +{
> >> +	RPiCameraData *data = cameraData(camera);
> >> +	Stream *stream = *streams.begin();
> >> +	const StreamConfiguration &cfg = stream->configuration();
> >> +	int ret;
> >> +
> >> +	LOG(RPI, Debug) << "Requesting " << cfg.bufferCount << " buffers";
> > 
> > I'd drop this message, or move it to Camera::allocateBuffers().
> 
> It's stream dependent ... so I'll just drop it.
> 
> >> +
> >> +	/*
> >> +	 * Buffers are allocated on the camera, and the capture pad of the ISP
> >> +	 *      unicam -> isp.output -> isp.capture -> Application
> >> +	 */
> >> +
> >> +	/* Create a new intermediate buffer pool */
> > 
> > s/pool/pool./ (same comment for all other comments in this file that
> > don't end with a period)
> 
> Do you think you can make a rule for this in checkstyle.py?

I've added it to the todo list.

> >> +	data->bayerBuffers_.createBuffers(cfg.bufferCount);
> >> +
> >> +	/* Tie the unicam video buffers to the intermediate pool */
> >> +	ret = data->unicam_->exportBuffers(&data->bayerBuffers_);
> >> +	if (ret)
> >> +		return ret;
> >> +
> >> +	ret = data->isp_->output()->importBuffers(&data->bayerBuffers_);
> >> +	if (ret)
> >> +		return ret;
> >> +
> >> +	/* Tie the stream buffers to the capture device of the ISP */
> >> +	if (stream->memoryType() == InternalMemory)
> >> +		ret = data->isp_->capture()->exportBuffers(&stream->bufferPool());
> >> +	else
> >> +		ret = data->isp_->capture()->importBuffers(&stream->bufferPool());
> >> +
> >> +	return ret;
> >> +}
> >> +
> >> +int PipelineHandlerRPi::freeBuffers(Camera *camera,
> >> +				    const std::set<Stream *> &streams)
> >> +{
> >> +	RPiCameraData *data = cameraData(camera);
> >> +	int ret;
> >> +
> >> +	ret = data->unicam_->releaseBuffers();
> >> +	ret = data->isp_->output()->releaseBuffers();
> >> +	ret = data->isp_->capture()->releaseBuffers();
> > 
> > You're losing the first two error values.
> 
> Yup - quick and dirty PoC :D
> 
> There are three cleanup operations. If one fails, should the others
> still be attempted? Or just give up?

I think they should be attempted as it's a cleanup path.
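
Something along these lines would attempt all three and still report the
first failure (sketch only):

	int ret = 0;
	int err;

	err = data->unicam_->releaseBuffers();
	if (err)
		ret = err;

	err = data->isp_->output()->releaseBuffers();
	if (err && !ret)
		ret = err;

	err = data->isp_->capture()->releaseBuffers();
	if (err && !ret)
		ret = err;

	data->bayerBuffers_.destroyBuffers();

	return ret;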

> >> +
> >> +	data->bayerBuffers_.destroyBuffers();
> >> +
> >> +	return ret;
> >> +}
> >> +
> >> +int PipelineHandlerRPi::start(Camera *camera)
> >> +{
> >> +	RPiCameraData *data = cameraData(camera);
> >> +	int ret;
> >> +
> >> +	data->rawBuffers_ = data->unicam_->queueAllBuffers();
> >> +	if (data->rawBuffers_.empty()) {
> >> +		LOG(RPI, Debug) << "Failed to queue unicam buffers";
> >> +		return -EINVAL;
> >> +	}
> >> +
> >> +	LOG(RPI, Warning) << "Using hard-coded exposure/gain defaults";
> >> +
> >> +	V4L2ControlList controls;
> >> +	controls.add(V4L2_CID_EXPOSURE, 1700);
> >> +	controls.add(V4L2_CID_ANALOGUE_GAIN, 180);
> >> +	ret = data->unicam_->setControls(&controls);
> >> +	if (ret) {
> >> +		LOG(RPI, Error) << "Failed to set controls";
> >> +		return ret;
> >> +	}
> >> +
> >> +	ret = data->isp_->output()->streamOn();
> >> +	if (ret)
> >> +		return ret;
> >> +
> >> +	ret = data->isp_->capture()->streamOn();
> >> +	if (ret)
> >> +		return ret;
> >> +
> >> +	ret = data->unicam_->streamOn();
> >> +	if (ret)
> >> +		return ret;
> > 
> > Shouldn't you stop streaming on the successfully started streams when an
> > error happens ?
> > 
> >> +
> >> +	return ret;
> > 
> > 	return 0;
> > 
> >> +}
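
For reference, a minimal sketch (not the author's code) of how the tail of
start() could stop the already-started streams when a later streamOn() call
fails, addressing the comment above; it assumes the same V4L2 device helpers
used elsewhere in the patch:

	ret = data->isp_->output()->streamOn();
	if (ret)
		return ret;

	ret = data->isp_->capture()->streamOn();
	if (ret) {
		data->isp_->output()->streamOff();
		return ret;
	}

	ret = data->unicam_->streamOn();
	if (ret) {
		data->isp_->capture()->streamOff();
		data->isp_->output()->streamOff();
		return ret;
	}

	return 0;
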
> >> +
> >> +void PipelineHandlerRPi::stop(Camera *camera)
> >> +{
> >> +	RPiCameraData *data = cameraData(camera);
> >> +
> >> +	data->isp_->capture()->streamOff();
> >> +	data->isp_->output()->streamOff();
> >> +	data->unicam_->streamOff();
> >> +
> >> +	data->rawBuffers_.clear();
> >> +}
> >> +
> >> +int PipelineHandlerRPi::queueRequest(Camera *camera, Request *request)
> >> +{
> >> +	RPiCameraData *data = cameraData(camera);
> >> +	Buffer *buffer = request->findBuffer(&data->stream_);
> >> +	if (!buffer) {
> >> +		LOG(RPI, Error)
> >> +			<< "Attempt to queue request with invalid stream";
> >> +
> >> +		return -ENOENT;
> > 
> > Can this happen ?
> 
> I don't know - I think this was taken from the new buffer handling bits,
> which have changed quite a lot lately.

My question was whether the Camera class had checks that would prevent
this from happening.

> This code exactly mirrors queueRequest() in the RkISP1 and Vimc pipeline
> handlers.
> 
> >> +	}
> >> +
> >> +	int ret = data->isp_->capture()->queueBuffer(buffer);
> >> +	if (ret < 0)
> >> +		return ret;
> >> +
> >> +	PipelineHandler::queueRequest(camera, request);
> >> +
> >> +	return 0;
> >> +}
> >> +
> >> +bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
> >> +{
> >> +	DeviceMatch unicam("unicam");
> >> +	DeviceMatch codec("bcm2835-codec");
> >> +
> >> +	/* We explicitly need the ISP device from the MMAL codec driver. */
> >> +	codec.add("bcm2835-codec-isp");
> > 
> > Is there any subdev in the unicam device that we could match on ?
> 
> Possibly one called "unicam" ... should we match "unicam":"unicam"? Or
> is "unicam" sufficient?
> 
> I'll add:
> 
> 	/* The video node is also named unicam. */
> 	unicam.add("unicam");

Works for me.
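
For clarity, the DeviceMatch setup in match() would then read as follows
(a sketch of the change agreed above, nothing else modified):

	DeviceMatch unicam("unicam");
	DeviceMatch codec("bcm2835-codec");

	/* The video node is also named unicam. */
	unicam.add("unicam");

	/* We explicitly need the ISP device from the MMAL codec driver. */
	codec.add("bcm2835-codec-isp");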

> >> +
> >> +	unicam_ = enumerator->search(unicam);
> >> +	if (!unicam_)
> >> +		return false;
> >> +
> >> +	codec_ = enumerator->search(codec);
> >> +	if (!codec_)
> >> +		return false;
> >> +
> >> +	unicam_->acquire();
> >> +	codec_->acquire();
> >> +
> >> +	std::unique_ptr<RPiCameraData> data = utils::make_unique<RPiCameraData>(this);
> >> +
> >> +	/* Locate and open the unicam video node. */
> >> +	data->unicam_ = new V4L2VideoDevice(unicam_->getEntityByName("unicam"));
> >> +	if (data->unicam_->open())
> >> +		return false;
> >> +
> >> +	/* Locate the ISP M2M node */
> >> +	MediaEntity *isp = codec_->getEntityByName("bcm2835-codec-isp");
> >> +	if (!isp)
> >> +		return false;
> >> +
> >> +	data->isp_ = new V4L2M2MDevice(isp->deviceNode());
> >> +	if (data->isp_->status()) {
> >> +		LOG(RPI, Error) << "Could not create the ISP device";
> >> +		return false;
> >> +	}
> >> +
> >> +	data->unicam_->bufferReady.connect(data.get(), &RPiCameraData::sensorReady);
> >> +	data->isp_->output()->bufferReady.connect(data.get(), &RPiCameraData::ispOutputReady);
> >> +	data->isp_->capture()->bufferReady.connect(data.get(), &RPiCameraData::ispCaptureReady);
> >> +
> >> +	/* Identify the sensor */
> >> +	for (MediaEntity *entity : unicam_->entities()) {
> >> +		if (entity->function() == MEDIA_ENT_F_CAM_SENSOR) {
> >> +			data->sensor_ = new CameraSensor(entity);
> >> +			break;
> >> +		}
> >> +	}
> >> +
> >> +	if (!data->sensor_)
> >> +		return false;
> >> +
> >> +	int ret = data->sensor_->init();
> >> +	if (ret)
> >> +		return false;
> > 
> > No need for an intermediate variable, you can write
> > 
> > 	if (data->sensor_->init())
> > 		return false;
> 
> done
> 
> >> +
> >> +	/* Create and register the camera. */
> >> +	std::set<Stream *> streams{ &data->stream_ };
> >> +	std::shared_ptr<Camera> camera =
> >> +		Camera::create(this, data->sensor_->entity()->name(), streams);
> >> +	registerCamera(std::move(camera), std::move(data));
> >> +
> >> +	return true;
> >> +}
> >> +
> >> +void RPiCameraData::sensorReady(Buffer *buffer)
> >> +{
> >> +	/* Deliver the frame from the sensor to the ISP */
> > 
> > You should skip this when buffer->status() == Buffer::BufferCancelled
> > (see the IPU3 pipeline handler).
> 
> Hrm ... I had this ... I must have lost them during a rebase.
> 
> >> +	isp_->output()->queueBuffer(buffer);
> >> +}
> >> +
> >> +void RPiCameraData::ispOutputReady(Buffer *buffer)
> >> +{
> >> +	/* Return a completed buffer from the ISP back to the sensor */
> > 
> > Same comment here.
> > 
> >> +	unicam_->queueBuffer(buffer);
> >> +}
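
For illustration, a minimal sketch of both callbacks with the cancelled-buffer
check suggested above, modelled on the IPU3 pipeline handler and using the
Buffer::BufferCancelled status named in the review:

void RPiCameraData::sensorReady(Buffer *buffer)
{
	/* Don't queue cancelled buffers to the ISP. */
	if (buffer->status() == Buffer::BufferCancelled)
		return;

	/* Deliver the frame from the sensor to the ISP. */
	isp_->output()->queueBuffer(buffer);
}

void RPiCameraData::ispOutputReady(Buffer *buffer)
{
	/* Don't recycle cancelled buffers back to the sensor. */
	if (buffer->status() == Buffer::BufferCancelled)
		return;

	/* Return a completed buffer from the ISP back to the sensor. */
	unicam_->queueBuffer(buffer);
}
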
> >> +
> >> +void RPiCameraData::ispCaptureReady(Buffer *buffer)
> >> +{
> >> +	Request *request = buffer->request();
> >> +
> >> +	pipe_->completeBuffer(camera_, request, buffer);
> >> +	pipe_->completeRequest(camera_, request);
> >> +}
> >> +
> >> +REGISTER_PIPELINE_HANDLER(PipelineHandlerRPi);
> >> +
> >> +} /* namespace libcamera */

Patch

diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
index 0d466225a72e..7ed7b26f3033 100644
--- a/src/libcamera/pipeline/meson.build
+++ b/src/libcamera/pipeline/meson.build
@@ -1,4 +1,5 @@ 
 libcamera_sources += files([
+    'raspberrypi.cpp',
     'uvcvideo.cpp',
     'vimc.cpp',
 ])
diff --git a/src/libcamera/pipeline/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi.cpp
new file mode 100644
index 000000000000..4c4c3dea0113
--- /dev/null
+++ b/src/libcamera/pipeline/raspberrypi.cpp
@@ -0,0 +1,425 @@ 
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2019, Google Inc.
+ *
+ * raspberrypi.cpp - Pipeline handler for raspberrypi devices
+ */
+
+#include <libcamera/camera.h>
+#include <libcamera/request.h>
+#include <libcamera/stream.h>
+
+#include "camera_sensor.h"
+#include "device_enumerator.h"
+#include "log.h"
+#include "media_device.h"
+#include "pipeline_handler.h"
+#include "utils.h"
+#include "v4l2_controls.h"
+#include "v4l2_videodevice.h"
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(RPI)
+
+class RPiCameraData : public CameraData
+{
+public:
+	RPiCameraData(PipelineHandler *pipe)
+		: CameraData(pipe), sensor_(nullptr), unicam_(nullptr),
+		  isp_(nullptr)
+	{
+	}
+
+	~RPiCameraData()
+	{
+		bayerBuffers_.destroyBuffers();
+		delete unicam_;
+		delete isp_;
+	}
+
+	void sensorReady(Buffer *buffer);
+	void ispOutputReady(Buffer *buffer);
+	void ispCaptureReady(Buffer *buffer);
+
+	CameraSensor *sensor_;
+	V4L2VideoDevice *unicam_;
+	V4L2M2MDevice *isp_;
+	Stream stream_;
+
+	BufferPool bayerBuffers_;
+	std::vector<std::unique_ptr<Buffer>> rawBuffers_;
+};
+
+class RPiCameraConfiguration : public CameraConfiguration
+{
+public:
+	RPiCameraConfiguration();
+
+	Status validate() override;
+};
+
+class PipelineHandlerRPi : public PipelineHandler
+{
+public:
+	PipelineHandlerRPi(CameraManager *manager);
+	~PipelineHandlerRPi();
+
+	CameraConfiguration *
+	generateConfiguration(Camera *camera,
+			      const StreamRoles &roles) override;
+	int configure(Camera *camera,
+		      CameraConfiguration *config) override;
+
+	int allocateBuffers(Camera *camera,
+			    const std::set<Stream *> &streams) override;
+	int freeBuffers(Camera *camera,
+			const std::set<Stream *> &streams) override;
+
+	int start(Camera *camera) override;
+	void stop(Camera *camera) override;
+
+	int queueRequest(Camera *camera, Request *request) override;
+
+	bool match(DeviceEnumerator *enumerator) override;
+
+private:
+	RPiCameraData *cameraData(const Camera *camera)
+	{
+		return static_cast<RPiCameraData *>(
+			PipelineHandler::cameraData(camera));
+	}
+
+	std::shared_ptr<MediaDevice> unicam_;
+	std::shared_ptr<MediaDevice> codec_;
+};
+
+RPiCameraConfiguration::RPiCameraConfiguration()
+	: CameraConfiguration()
+{
+}
+
+CameraConfiguration::Status RPiCameraConfiguration::validate()
+{
+	Status status = Valid;
+
+	if (config_.empty())
+		return Invalid;
+
+	/* Todo: Experiment with increased stream support through the ISP. */
+	if (config_.size() > 1) {
+		config_.resize(1);
+		status = Adjusted;
+	}
+
+	StreamConfiguration &cfg = config_[0];
+
+	/* Todo: restrict to hardware capabilities. */
+
+	cfg.bufferCount = 4;
+
+	return status;
+}
+
+PipelineHandlerRPi::PipelineHandlerRPi(CameraManager *manager)
+	: PipelineHandler(manager), unicam_(nullptr), codec_(nullptr)
+{
+}
+
+PipelineHandlerRPi::~PipelineHandlerRPi()
+{
+	if (unicam_)
+		unicam_->release();
+
+	if (codec_)
+		codec_->release();
+}
+
+CameraConfiguration *
+PipelineHandlerRPi::generateConfiguration(Camera *camera,
+					  const StreamRoles &roles)
+{
+	CameraConfiguration *config = new RPiCameraConfiguration();
+
+	if (roles.empty())
+		return config;
+
+	StreamConfiguration cfg{};
+	cfg.pixelFormat = V4L2_PIX_FMT_YUYV;
+	cfg.size = { 1920, 1080 }; // data->sensor_->resolution();
+	cfg.bufferCount = 4;
+
+	LOG(RPI, Debug) << "Config default as " << cfg.toString();
+
+	config->addConfiguration(cfg);
+
+	config->validate();
+
+	return config;
+}
+
+int PipelineHandlerRPi::configure(Camera *camera,
+				  CameraConfiguration *config)
+{
+	RPiCameraData *data = cameraData(camera);
+	StreamConfiguration &cfg = config->at(0);
+	int ret;
+
+	Size sensorSize = { 1920, 1080 };
+	Size outputSize = { 1920, 1088 };
+
+	V4L2DeviceFormat format = {};
+	format.size = sensorSize;
+	format.fourcc = V4L2_PIX_FMT_SRGGB10P;
+
+	LOG(RPI, Debug) << "Setting format to " << format.toString();
+
+	ret = data->unicam_->setFormat(&format);
+	if (ret)
+		return ret;
+
+	if (format.size != sensorSize ||
+	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
+		LOG(RPI, Error)
+			<< "Failed to set format on Video device: "
+			<< format.toString();
+		return -EINVAL;
+	}
+
+	format.size = outputSize;
+
+	ret = data->isp_->output()->setFormat(&format);
+
+	if (format.size != outputSize ||
+	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
+		LOG(RPI, Error) << "Failed to set format on ISP output device: "
+				<< format.toString();
+		return -EINVAL;
+	}
+
+	/* Configure the ISP based to generate the requested size and format. */
+	format.size = cfg.size;
+	format.fourcc = cfg.pixelFormat;
+
+	ret = data->isp_->capture()->setFormat(&format);
+
+	if (format.size != cfg.size ||
+	    format.fourcc != cfg.pixelFormat) {
+		LOG(RPI, Error)
+			<< "Failed to set format on ISP capture device: "
+			<< format.toString();
+		return -EINVAL;
+	}
+
+	cfg.setStream(&data->stream_);
+
+	return 0;
+}
+
+int PipelineHandlerRPi::allocateBuffers(Camera *camera,
+					const std::set<Stream *> &streams)
+{
+	RPiCameraData *data = cameraData(camera);
+	Stream *stream = *streams.begin();
+	const StreamConfiguration &cfg = stream->configuration();
+	int ret;
+
+	LOG(RPI, Debug) << "Requesting " << cfg.bufferCount << " buffers";
+
+	/*
+	 * Buffers are allocated on the camera, and the capture pad of the ISP
+	 *      unicam -> isp.output -> isp.capture -> Application
+	 */
+
+	/* Create a new intermediate buffer pool */
+	data->bayerBuffers_.createBuffers(cfg.bufferCount);
+
+	/* Tie the unicam video buffers to the intermediate pool */
+	ret = data->unicam_->exportBuffers(&data->bayerBuffers_);
+	if (ret)
+		return ret;
+
+	ret = data->isp_->output()->importBuffers(&data->bayerBuffers_);
+	if (ret)
+		return ret;
+
+	/* Tie the stream buffers to the capture device of the ISP */
+	if (stream->memoryType() == InternalMemory)
+		ret = data->isp_->capture()->exportBuffers(&stream->bufferPool());
+	else
+		ret = data->isp_->capture()->importBuffers(&stream->bufferPool());
+
+	return ret;
+}
+
+int PipelineHandlerRPi::freeBuffers(Camera *camera,
+				    const std::set<Stream *> &streams)
+{
+	RPiCameraData *data = cameraData(camera);
+	int ret;
+
+	ret = data->unicam_->releaseBuffers();
+	ret = data->isp_->output()->releaseBuffers();
+	ret = data->isp_->capture()->releaseBuffers();
+
+	data->bayerBuffers_.destroyBuffers();
+
+	return ret;
+}
+
+int PipelineHandlerRPi::start(Camera *camera)
+{
+	RPiCameraData *data = cameraData(camera);
+	int ret;
+
+	data->rawBuffers_ = data->unicam_->queueAllBuffers();
+	if (data->rawBuffers_.empty()) {
+		LOG(RPI, Debug) << "Failed to queue unicam buffers";
+		return -EINVAL;
+	}
+
+	LOG(RPI, Warning) << "Using hard-coded exposure/gain defaults";
+
+	V4L2ControlList controls;
+	controls.add(V4L2_CID_EXPOSURE, 1700);
+	controls.add(V4L2_CID_ANALOGUE_GAIN, 180);
+	ret = data->unicam_->setControls(&controls);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to set controls";
+		return ret;
+	}
+
+	ret = data->isp_->output()->streamOn();
+	if (ret)
+		return ret;
+
+	ret = data->isp_->capture()->streamOn();
+	if (ret)
+		return ret;
+
+	ret = data->unicam_->streamOn();
+	if (ret)
+		return ret;
+
+	return ret;
+}
+
+void PipelineHandlerRPi::stop(Camera *camera)
+{
+	RPiCameraData *data = cameraData(camera);
+
+	data->isp_->capture()->streamOff();
+	data->isp_->output()->streamOff();
+	data->unicam_->streamOff();
+
+	data->rawBuffers_.clear();
+}
+
+int PipelineHandlerRPi::queueRequest(Camera *camera, Request *request)
+{
+	RPiCameraData *data = cameraData(camera);
+	Buffer *buffer = request->findBuffer(&data->stream_);
+	if (!buffer) {
+		LOG(RPI, Error)
+			<< "Attempt to queue request with invalid stream";
+
+		return -ENOENT;
+	}
+
+	int ret = data->isp_->capture()->queueBuffer(buffer);
+	if (ret < 0)
+		return ret;
+
+	PipelineHandler::queueRequest(camera, request);
+
+	return 0;
+}
+
+bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
+{
+	DeviceMatch unicam("unicam");
+	DeviceMatch codec("bcm2835-codec");
+
+	/* We explicitly need the ISP device from the MMAL codec driver. */
+	codec.add("bcm2835-codec-isp");
+
+	unicam_ = enumerator->search(unicam);
+	if (!unicam_)
+		return false;
+
+	codec_ = enumerator->search(codec);
+	if (!codec_)
+		return false;
+
+	unicam_->acquire();
+	codec_->acquire();
+
+	std::unique_ptr<RPiCameraData> data = utils::make_unique<RPiCameraData>(this);
+
+	/* Locate and open the unicam video node. */
+	data->unicam_ = new V4L2VideoDevice(unicam_->getEntityByName("unicam"));
+	if (data->unicam_->open())
+		return false;
+
+	/* Locate the ISP M2M node */
+	MediaEntity *isp = codec_->getEntityByName("bcm2835-codec-isp");
+	if (!isp)
+		return false;
+
+	data->isp_ = new V4L2M2MDevice(isp->deviceNode());
+	if (data->isp_->status()) {
+		LOG(RPI, Error) << "Could not create the ISP device";
+		return false;
+	}
+
+	data->unicam_->bufferReady.connect(data.get(), &RPiCameraData::sensorReady);
+	data->isp_->output()->bufferReady.connect(data.get(), &RPiCameraData::ispOutputReady);
+	data->isp_->capture()->bufferReady.connect(data.get(), &RPiCameraData::ispCaptureReady);
+
+	/* Identify the sensor */
+	for (MediaEntity *entity : unicam_->entities()) {
+		if (entity->function() == MEDIA_ENT_F_CAM_SENSOR) {
+			data->sensor_ = new CameraSensor(entity);
+			break;
+		}
+	}
+
+	if (!data->sensor_)
+		return false;
+
+	int ret = data->sensor_->init();
+	if (ret)
+		return false;
+
+	/* Create and register the camera. */
+	std::set<Stream *> streams{ &data->stream_ };
+	std::shared_ptr<Camera> camera =
+		Camera::create(this, data->sensor_->entity()->name(), streams);
+	registerCamera(std::move(camera), std::move(data));
+
+	return true;
+}
+
+void RPiCameraData::sensorReady(Buffer *buffer)
+{
+	/* Deliver the frame from the sensor to the ISP */
+	isp_->output()->queueBuffer(buffer);
+}
+
+void RPiCameraData::ispOutputReady(Buffer *buffer)
+{
+	/* Return a completed buffer from the ISP back to the sensor */
+	unicam_->queueBuffer(buffer);
+}
+
+void RPiCameraData::ispCaptureReady(Buffer *buffer)
+{
+	Request *request = buffer->request();
+
+	pipe_->completeBuffer(camera_, request, buffer);
+	pipe_->completeRequest(camera_, request);
+}
+
+REGISTER_PIPELINE_HANDLER(PipelineHandlerRPi);
+
+} /* namespace libcamera */