[libcamera-devel,11/11] libcamera: pipeline: Add RKISP1 pipeline

Message ID 20190415165700.22970-12-laurent.pinchart@ideasonboard.com
State Superseded
Series
  • Rockchip ISP pipeline handler

Commit Message

Laurent Pinchart April 15, 2019, 4:57 p.m. UTC
The pipeline handler for the Rockchip ISP creates one camera instance
per detected raw Bayer CSI-2 sensor. Parallel sensors and YUV sensors
are not supported yet.

As the ISP has a single CSI-2 receiver, only one camera can be used at a
time. Mutual exclusion isn't implemented yet.

Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
---
 src/libcamera/pipeline/meson.build        |   1 +
 src/libcamera/pipeline/rkisp1/meson.build |   3 +
 src/libcamera/pipeline/rkisp1/rkisp1.cpp  | 448 ++++++++++++++++++++++
 3 files changed, 452 insertions(+)
 create mode 100644 src/libcamera/pipeline/rkisp1/meson.build
 create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1.cpp

Comments

Niklas Söderlund April 15, 2019, 10:08 p.m. UTC | #1
Hi Laurent,

Thanks for your work.

On 2019-04-15 19:57:00 +0300, Laurent Pinchart wrote:
> The pipeline handler for the Rockchip ISP creates one camera instance
> per detected raw Bayer CSI-2 sensor. Parallel sensors and YUV sensors
> are not supported yet.
> 
> As the ISP has a single CSI-2 receiver, only one camera can be used at a
> time. Mutual exclusion isn't implemented yet.
> 
> Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>

I still want to test this before adding my tag, so I need to set up my 
rkisp1 environment. Overall I think this looks good; I have some small 
comments below which you might want to have a go at while I get my 
hardware in line.

> ---
>  src/libcamera/pipeline/meson.build        |   1 +
>  src/libcamera/pipeline/rkisp1/meson.build |   3 +
>  src/libcamera/pipeline/rkisp1/rkisp1.cpp  | 448 ++++++++++++++++++++++
>  3 files changed, 452 insertions(+)
>  create mode 100644 src/libcamera/pipeline/rkisp1/meson.build
>  create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1.cpp
> 
> diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
> index 40bb26405b88..0d466225a72e 100644
> --- a/src/libcamera/pipeline/meson.build
> +++ b/src/libcamera/pipeline/meson.build
> @@ -4,3 +4,4 @@ libcamera_sources += files([
>  ])
>  
>  subdir('ipu3')
> +subdir('rkisp1')
> diff --git a/src/libcamera/pipeline/rkisp1/meson.build b/src/libcamera/pipeline/rkisp1/meson.build
> new file mode 100644
> index 000000000000..f1cc4046b5d0
> --- /dev/null
> +++ b/src/libcamera/pipeline/rkisp1/meson.build
> @@ -0,0 +1,3 @@
> +libcamera_sources += files([
> +    'rkisp1.cpp',
> +])
> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> new file mode 100644
> index 000000000000..887c879d9024
> --- /dev/null
> +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> @@ -0,0 +1,448 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2019, Google Inc.
> + *
> + * rkisp1.cpp - Pipeline handler for Rockchip ISP1
> + */
> +
> +#include <iomanip>
> +#include <memory>
> +#include <vector>
> +
> +#include <linux/media-bus-format.h>
> +
> +#include <libcamera/camera.h>
> +#include <libcamera/request.h>
> +#include <libcamera/stream.h>
> +
> +#include "camera_sensor.h"
> +#include "device_enumerator.h"
> +#include "log.h"
> +#include "media_device.h"
> +#include "pipeline_handler.h"
> +#include "utils.h"
> +#include "v4l2_device.h"
> +#include "v4l2_subdevice.h"
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(RkISP1)
> +
> +class PipelineHandlerRkISP1 : public PipelineHandler
> +{
> +public:
> +	PipelineHandlerRkISP1(CameraManager *manager);
> +	~PipelineHandlerRkISP1();
> +
> +	CameraConfiguration streamConfiguration(Camera *camera,
> +		const std::vector<StreamUsage> &usages) override;
> +	int configureStreams(Camera *camera,
> +			     const CameraConfiguration &config) override;
> +
> +	int allocateBuffers(Camera *camera, Stream *stream) override;
> +	int freeBuffers(Camera *camera, Stream *stream) override;
> +
> +	int start(Camera *camera) override;
> +	void stop(Camera *camera) override;
> +
> +	int queueRequest(Camera *camera, Request *request) override;
> +
> +	bool match(DeviceEnumerator *enumerator) override;
> +
> +private:
> +	class RkISP1CameraData : public CameraData
> +	{
> +	public:
> +		RkISP1CameraData(PipelineHandler *pipe)
> +			: CameraData(pipe), sensor_(nullptr)
> +		{
> +		}
> +
> +		~RkISP1CameraData()
> +		{
> +			delete sensor_;
> +		}
> +
> +		Stream stream_;
> +		CameraSensor *sensor_;
> +	};
> +
> +	static constexpr unsigned int RKISP1_BUFFER_COUNT = 4;
> +
> +	RkISP1CameraData *cameraData(const Camera *camera)
> +	{
> +		return static_cast<RkISP1CameraData *>(
> +			PipelineHandler::cameraData(camera));
> +	}
> +
> +	int initLinks();
> +	int createCamera(MediaEntity *sensor);
> +	void bufferReady(Buffer *buffer);
> +
> +	std::shared_ptr<MediaDevice> media_;
> +	V4L2Subdevice *dphy_;
> +	V4L2Subdevice *isp_;
> +	V4L2Device *video_;
> +
> +	Camera *activeCamera_;
> +};
> +
> +PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
> +	: PipelineHandler(manager), dphy_(nullptr), isp_(nullptr),
> +	  video_(nullptr)
> +{
> +}
> +
> +PipelineHandlerRkISP1::~PipelineHandlerRkISP1()
> +{
> +	delete video_;
> +	delete isp_;
> +	delete dphy_;
> +
> +	if (media_)
> +		media_->release();
> +}
> +
> +/* -----------------------------------------------------------------------------
> + * Pipeline Operations
> + */
> +
> +CameraConfiguration PipelineHandlerRkISP1::streamConfiguration(Camera *camera,
> +	const std::vector<StreamUsage> &usages)
> +{
> +	RkISP1CameraData *data = cameraData(camera);
> +	CameraConfiguration configs;
> +	StreamConfiguration config{};
> +
> +	const Size &resolution = data->sensor_->resolution();
> +	config.width = resolution.width;
> +	config.height = resolution.height;
> +	config.pixelFormat = V4L2_PIX_FMT_NV12;
> +	config.bufferCount = RKISP1_BUFFER_COUNT;
> +
> +	configs[&data->stream_] = config;
> +
> +	LOG(RkISP1, Debug)
> +		<< "Stream format set to " << config.width << "x"
> +		<< config.height << "-0x" << std::hex << std::setfill('0')
> +		<< std::setw(8) << config.pixelFormat;
> +
> +	return configs;
> +}
> +
> +int PipelineHandlerRkISP1::configureStreams(Camera *camera,
> +					    const CameraConfiguration &config)
> +{
> +	RkISP1CameraData *data = cameraData(camera);
> +	const StreamConfiguration &cfg = config[&data->stream_];
> +	CameraSensor *sensor = data->sensor_;
> +	int ret;
> +
> +	/* Verify the configuration. */
> +	const Size &resolution = sensor->resolution();
> +	if (cfg.width > resolution.width ||
> +	    cfg.height > resolution.height) {
> +		LOG(RkISP1, Error)
> +			<< "Invalid stream size: larger than sensor resolution";
> +		return -EINVAL;
> +	}
> +
> +	/*
> +	 * Configure the sensor links: enable the link corresponding to this
> +	 * camera and disable all the other sensor links.
> +	 */
> +	const MediaPad *pad = dphy_->entity()->getPadByIndex(0);
> +
> +	ret = media_->open();
> +	if (ret < 0)
> +		return ret;
> +
> +	for (MediaLink *link : pad->links()) {
> +		bool enable = link->source()->entity() == sensor->entity();
> +
> +		if (!!(link->flags() & MEDIA_LNK_FL_ENABLED) == enable)
> +			continue;
> +
> +		LOG(RkISP1, Debug)
> +			<< (enable ? "Enabling" : "Disabling")
> +			<< " link from sensor '"
> +			<< link->source()->entity()->name()
> +			<< "' to CSI-2 receiver";
> +
> +		ret = link->setEnabled(enable);
> +		if (ret < 0)
> +			break;
> +	}
> +
> +	media_->close();
> +
> +	if (ret < 0)
> +		return ret;
> +
> +	/*
> +	 * Configure the format on the sensor output and propagate it through
> +	 * the pipeline.
> +	 */
> +	V4L2SubdeviceFormat format;
> +	format = sensor->getFormat({ MEDIA_BUS_FMT_SBGGR12_1X12,
> +				     MEDIA_BUS_FMT_SGBRG12_1X12,
> +				     MEDIA_BUS_FMT_SGRBG12_1X12,
> +				     MEDIA_BUS_FMT_SRGGB12_1X12,
> +				     MEDIA_BUS_FMT_SBGGR10_1X10,
> +				     MEDIA_BUS_FMT_SGBRG10_1X10,
> +				     MEDIA_BUS_FMT_SGRBG10_1X10,
> +				     MEDIA_BUS_FMT_SRGGB10_1X10,
> +				     MEDIA_BUS_FMT_SBGGR8_1X8,
> +				     MEDIA_BUS_FMT_SGBRG8_1X8,
> +				     MEDIA_BUS_FMT_SGRBG8_1X8,
> +				     MEDIA_BUS_FMT_SRGGB8_1X8 },
> +				   Size(cfg.width, cfg.height));
> +	ret = sensor->setFormat(&format);
> +	if (ret < 0)
> +		return ret;
> +
> +	ret = dphy_->setFormat(0, &format);
> +	if (ret < 0)
> +		return ret;
> +
> +	ret = dphy_->getFormat(1, &format);
> +	if (ret < 0)
> +		return ret;
> +
> +	ret = isp_->setFormat(0, &format);
> +	if (ret < 0)
> +		return ret;
> +
> +	V4L2DeviceFormat outputFormat = {};
> +	outputFormat.width = cfg.width;
> +	outputFormat.height = cfg.height;
> +	outputFormat.fourcc = V4L2_PIX_FMT_NV12;
> +	outputFormat.planesCount = 2;
> +
> +	ret = video_->setFormat(&outputFormat);
> +	if (ret)
> +		return ret;
> +
> +	LOG(RkISP1, Debug)
> +		<< "Configured pipeline for " << format.toString() << " -> "
> +		<< outputFormat.toString();

Is this LOG needed, given that 3/11 already prints this in 
Camera::configureStreams()?

> +
> +	return 0;
> +}
> +
> +int PipelineHandlerRkISP1::allocateBuffers(Camera *camera, Stream *stream)
> +{
> +	return video_->exportBuffers(&stream->bufferPool());
> +}
> +
> +int PipelineHandlerRkISP1::freeBuffers(Camera *camera, Stream *stream)
> +{
> +	if (video_->releaseBuffers())
> +		LOG(RkISP1, Error) << "Failed to release buffers";
> +
> +	return 0;
> +}
> +
> +int PipelineHandlerRkISP1::start(Camera *camera)
> +{
> +	int ret;
> +
> +	ret = video_->streamOn();
> +	if (ret)
> +		LOG(RkISP1, Error)
> +			<< "Failed to start camera " << camera->name();
> +
> +	activeCamera_ = camera;
> +
> +	return ret;
> +}
> +
> +void PipelineHandlerRkISP1::stop(Camera *camera)
> +{
> +	int ret;
> +
> +	ret = video_->streamOff();
> +	if (ret)
> +		LOG(RkISP1, Warning)
> +			<< "Failed to stop camera " << camera->name();
> +
> +	PipelineHandler::stop(camera);
> +
> +	activeCamera_ = nullptr;
> +}
> +
> +int PipelineHandlerRkISP1::queueRequest(Camera *camera, Request *request)
> +{
> +	RkISP1CameraData *data = cameraData(camera);
> +	Stream *stream = &data->stream_;
> +
> +	Buffer *buffer = request->findBuffer(stream);
> +	if (!buffer) {
> +		LOG(RkISP1, Error)
> +			<< "Attempt to queue request with invalid stream";
> +		return -ENOENT;
> +	}

Should we move this check to Camera::queueRequest()?

> +
> +	int ret = video_->queueBuffer(buffer);
> +	if (ret < 0)
> +		return ret;
> +
> +	PipelineHandler::queueRequest(camera, request);
> +
> +	return 0;
> +}
> +
> +/* -----------------------------------------------------------------------------
> + * Match and Setup
> + */
> +
> +int PipelineHandlerRkISP1::initLinks()
> +{
> +	MediaLink *link;
> +	int ret;
> +
> +	ret = media_->disableLinks();
> +	if (ret < 0)
> +		return ret;
> +
> +	link = media_->link("rockchip-sy-mipi-dphy", 1, "rkisp1-isp-subdev", 0);
> +	if (!link)
> +		return -ENODEV;
> +
> +	ret = link->setEnabled(true);
> +	if (ret < 0)
> +		return ret;
> +
> +	link = media_->link("rkisp1-isp-subdev", 2, "rkisp1_mainpath", 0);
> +	if (!link)
> +		return -ENODEV;
> +
> +	ret = link->setEnabled(true);
> +	if (ret < 0)
> +		return ret;
> +
> +	return 0;
> +}
> +
> +int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
> +{
> +	int ret;
> +
> +	std::unique_ptr<RkISP1CameraData> data =
> +		utils::make_unique<RkISP1CameraData>(this);
> +
> +	data->sensor_ = new CameraSensor(sensor);
> +	ret = data->sensor_->init();
> +	if (ret)
> +		return ret;
> +
> +	std::set<Stream *> streams{ &data->stream_ };
> +	std::shared_ptr<Camera> camera =
> +		Camera::create(this, sensor->name(), streams);
> +	registerCamera(std::move(camera), std::move(data));
> +
> +	return 0;
> +}
> +
> +bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
> +{
> +	int ret;
> +
> +	DeviceMatch dm("rkisp1");
> +	dm.add("rkisp1-isp-subdev");
> +	dm.add("rkisp1_selfpath");
> +	dm.add("rkisp1_mainpath");
> +	dm.add("rkisp1-statistics");
> +	dm.add("rkisp1-input-params");
> +	dm.add("rockchip-sy-mipi-dphy");
> +
> +	media_ = enumerator->search(dm);
> +	if (!media_)
> +		return false;
> +
> +	media_->acquire();
> +
> +	ret = media_->open();
> +	if (ret < 0)
> +		return ret;
> +
> +	/* Create the V4L2 subdevices we will need. */
> +	MediaEntity *dphy = media_->getEntityByName("rockchip-sy-mipi-dphy");
> +	dphy_ = new V4L2Subdevice(dphy);
> +	ret = dphy_->open();
> +	if (ret < 0)
> +		return ret;
> +
> +	MediaEntity *isp = media_->getEntityByName("rkisp1-isp-subdev");
> +	isp_ = new V4L2Subdevice(isp);
> +	ret = isp_->open();
> +	if (ret < 0)
> +		return ret;
> +
> +	/* Locate and open the capture video node. */
> +	video_ = new V4L2Device(media_->getEntityByName("rkisp1_mainpath"));
> +	if (video_->open())
> +		return false;
> +
> +	video_->bufferReady.connect(this, &PipelineHandlerRkISP1::bufferReady);
> +
> +	/* Configure default links. */
> +	ret = initLinks();
> +	if (ret < 0) {
> +		LOG(RkISP1, Error) << "Failed to setup links";
> +		return false;
> +	}
> +
> +	/*
> +	 * Enumerate all sensors connected to the CSI-2 receiver and create one
> +	 * camera instance for each of them.
> +	 */
> +	const MediaPad *pad = dphy->getPadByIndex(0);
> +	if (!pad) {
> +		ret = -EINVAL;
> +		goto done;
> +	}
> +
> +	for (MediaLink *link : pad->links()) {
> +		/*
> +		 * A sensor is expected to have a single pad and expose the
> +		 * SENSOR function. This is overly restrictive; better logic
> +		 * is needed to support sensors that expose multiple subdevs.
> +		 *
> +		 * \todo Create a helper to identify sensors
> +		 */
> +		MediaEntity *sensor = link->source()->entity();
> +		if (!sensor)
> +			continue;
> +		if (sensor->pads().size() != 1)
> +			continue;
> +		if (sensor->function() != MEDIA_ENT_F_CAM_SENSOR)
> +			continue;

Some of these checks overlap with CameraSensor::init(). Should all of 
them be moved there? createCamera() will fail if CameraSensor::init() 
fails anyhow.

> +
> +		createCamera(sensor);
> +	}
> +
> +done:
> +	media_->close();
> +
> +	return ret == 0;
> +}
> +
> +/* -----------------------------------------------------------------------------
> + * Buffer Handling
> + */
> +
> +void PipelineHandlerRkISP1::bufferReady(Buffer *buffer)
> +{
> +	ASSERT(activeCamera_);
> +
> +	RkISP1CameraData *data = cameraData(activeCamera_);
> +	Request *request = data->queuedRequests_.front();
> +
> +	completeBuffer(activeCamera_, request, buffer);
> +	completeRequest(activeCamera_, request);
> +}
> +
> +REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1);
> +
> +} /* namespace libcamera */
> -- 
> Regards,
> 
> Laurent Pinchart
> 
> _______________________________________________
> libcamera-devel mailing list
> libcamera-devel@lists.libcamera.org
> https://lists.libcamera.org/listinfo/libcamera-devel
Laurent Pinchart April 15, 2019, 11:09 p.m. UTC | #2
Hi Niklas,

On Tue, Apr 16, 2019 at 12:08:11AM +0200, Niklas Söderlund wrote:
> On 2019-04-15 19:57:00 +0300, Laurent Pinchart wrote:
> > The pipeline handler for the Rockchip ISP creates one camera instance
> > per detected raw Bayer CSI-2 sensor. Parallel sensors and YUV sensors
> > are not supported yet.
> > 
> > As the ISP has a single CSI-2 receiver, only one camera can be used at a
> > time. Mutual exclusion isn't implemented yet.
> > 
> > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
> 
> I still want to test this before adding my tag, so I need to set up my 
> rkisp1 environment. Overall I think this looks good; I have some small 
> comments below which you might want to have a go at while I get my 
> hardware in line.
> 
> > ---
> >  src/libcamera/pipeline/meson.build        |   1 +
> >  src/libcamera/pipeline/rkisp1/meson.build |   3 +
> >  src/libcamera/pipeline/rkisp1/rkisp1.cpp  | 448 ++++++++++++++++++++++
> >  3 files changed, 452 insertions(+)
> >  create mode 100644 src/libcamera/pipeline/rkisp1/meson.build
> >  create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > 
> > diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
> > index 40bb26405b88..0d466225a72e 100644
> > --- a/src/libcamera/pipeline/meson.build
> > +++ b/src/libcamera/pipeline/meson.build
> > @@ -4,3 +4,4 @@ libcamera_sources += files([
> >  ])
> >  
> >  subdir('ipu3')
> > +subdir('rkisp1')
> > diff --git a/src/libcamera/pipeline/rkisp1/meson.build b/src/libcamera/pipeline/rkisp1/meson.build
> > new file mode 100644
> > index 000000000000..f1cc4046b5d0
> > --- /dev/null
> > +++ b/src/libcamera/pipeline/rkisp1/meson.build
> > @@ -0,0 +1,3 @@
> > +libcamera_sources += files([
> > +    'rkisp1.cpp',
> > +])
> > diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > new file mode 100644
> > index 000000000000..887c879d9024
> > --- /dev/null
> > +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > @@ -0,0 +1,448 @@
> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> > +/*
> > + * Copyright (C) 2019, Google Inc.
> > + *
> > + * rkisp1.cpp - Pipeline handler for Rockchip ISP1
> > + */
> > +
> > +#include <iomanip>
> > +#include <memory>
> > +#include <vector>
> > +
> > +#include <linux/media-bus-format.h>
> > +
> > +#include <libcamera/camera.h>
> > +#include <libcamera/request.h>
> > +#include <libcamera/stream.h>
> > +
> > +#include "camera_sensor.h"
> > +#include "device_enumerator.h"
> > +#include "log.h"
> > +#include "media_device.h"
> > +#include "pipeline_handler.h"
> > +#include "utils.h"
> > +#include "v4l2_device.h"
> > +#include "v4l2_subdevice.h"
> > +
> > +namespace libcamera {
> > +
> > +LOG_DEFINE_CATEGORY(RkISP1)
> > +
> > +class PipelineHandlerRkISP1 : public PipelineHandler
> > +{
> > +public:
> > +	PipelineHandlerRkISP1(CameraManager *manager);
> > +	~PipelineHandlerRkISP1();
> > +
> > +	CameraConfiguration streamConfiguration(Camera *camera,
> > +		const std::vector<StreamUsage> &usages) override;
> > +	int configureStreams(Camera *camera,
> > +			     const CameraConfiguration &config) override;
> > +
> > +	int allocateBuffers(Camera *camera, Stream *stream) override;
> > +	int freeBuffers(Camera *camera, Stream *stream) override;
> > +
> > +	int start(Camera *camera) override;
> > +	void stop(Camera *camera) override;
> > +
> > +	int queueRequest(Camera *camera, Request *request) override;
> > +
> > +	bool match(DeviceEnumerator *enumerator) override;
> > +
> > +private:
> > +	class RkISP1CameraData : public CameraData
> > +	{
> > +	public:
> > +		RkISP1CameraData(PipelineHandler *pipe)
> > +			: CameraData(pipe), sensor_(nullptr)
> > +		{
> > +		}
> > +
> > +		~RkISP1CameraData()
> > +		{
> > +			delete sensor_;
> > +		}
> > +
> > +		Stream stream_;
> > +		CameraSensor *sensor_;
> > +	};
> > +
> > +	static constexpr unsigned int RKISP1_BUFFER_COUNT = 4;
> > +
> > +	RkISP1CameraData *cameraData(const Camera *camera)
> > +	{
> > +		return static_cast<RkISP1CameraData *>(
> > +			PipelineHandler::cameraData(camera));
> > +	}
> > +
> > +	int initLinks();
> > +	int createCamera(MediaEntity *sensor);
> > +	void bufferReady(Buffer *buffer);
> > +
> > +	std::shared_ptr<MediaDevice> media_;
> > +	V4L2Subdevice *dphy_;
> > +	V4L2Subdevice *isp_;
> > +	V4L2Device *video_;
> > +
> > +	Camera *activeCamera_;
> > +};
> > +
> > +PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
> > +	: PipelineHandler(manager), dphy_(nullptr), isp_(nullptr),
> > +	  video_(nullptr)
> > +{
> > +}
> > +
> > +PipelineHandlerRkISP1::~PipelineHandlerRkISP1()
> > +{
> > +	delete video_;
> > +	delete isp_;
> > +	delete dphy_;
> > +
> > +	if (media_)
> > +		media_->release();
> > +}
> > +
> > +/* -----------------------------------------------------------------------------
> > + * Pipeline Operations
> > + */
> > +
> > +CameraConfiguration PipelineHandlerRkISP1::streamConfiguration(Camera *camera,
> > +	const std::vector<StreamUsage> &usages)
> > +{
> > +	RkISP1CameraData *data = cameraData(camera);
> > +	CameraConfiguration configs;
> > +	StreamConfiguration config{};
> > +
> > +	const Size &resolution = data->sensor_->resolution();
> > +	config.width = resolution.width;
> > +	config.height = resolution.height;
> > +	config.pixelFormat = V4L2_PIX_FMT_NV12;
> > +	config.bufferCount = RKISP1_BUFFER_COUNT;
> > +
> > +	configs[&data->stream_] = config;
> > +
> > +	LOG(RkISP1, Debug)
> > +		<< "Stream format set to " << config.width << "x"
> > +		<< config.height << "-0x" << std::hex << std::setfill('0')
> > +		<< std::setw(8) << config.pixelFormat;
> > +
> > +	return configs;
> > +}
> > +
> > +int PipelineHandlerRkISP1::configureStreams(Camera *camera,
> > +					    const CameraConfiguration &config)
> > +{
> > +	RkISP1CameraData *data = cameraData(camera);
> > +	const StreamConfiguration &cfg = config[&data->stream_];
> > +	CameraSensor *sensor = data->sensor_;
> > +	int ret;
> > +
> > +	/* Verify the configuration. */
> > +	const Size &resolution = sensor->resolution();
> > +	if (cfg.width > resolution.width ||
> > +	    cfg.height > resolution.height) {
> > +		LOG(RkISP1, Error)
> > +			<< "Invalid stream size: larger than sensor resolution";
> > +		return -EINVAL;
> > +	}
> > +
> > +	/*
> > +	 * Configure the sensor links: enable the link corresponding to this
> > +	 * camera and disable all the other sensor links.
> > +	 */
> > +	const MediaPad *pad = dphy_->entity()->getPadByIndex(0);
> > +
> > +	ret = media_->open();
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	for (MediaLink *link : pad->links()) {
> > +		bool enable = link->source()->entity() == sensor->entity();
> > +
> > +		if (!!(link->flags() & MEDIA_LNK_FL_ENABLED) == enable)
> > +			continue;
> > +
> > +		LOG(RkISP1, Debug)
> > +			<< (enable ? "Enabling" : "Disabling")
> > +			<< " link from sensor '"
> > +			<< link->source()->entity()->name()
> > +			<< "' to CSI-2 receiver";
> > +
> > +		ret = link->setEnabled(enable);
> > +		if (ret < 0)
> > +			break;
> > +	}
> > +
> > +	media_->close();
> > +
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	/*
> > +	 * Configure the format on the sensor output and propagate it through
> > +	 * the pipeline.
> > +	 */
> > +	V4L2SubdeviceFormat format;
> > +	format = sensor->getFormat({ MEDIA_BUS_FMT_SBGGR12_1X12,
> > +				     MEDIA_BUS_FMT_SGBRG12_1X12,
> > +				     MEDIA_BUS_FMT_SGRBG12_1X12,
> > +				     MEDIA_BUS_FMT_SRGGB12_1X12,
> > +				     MEDIA_BUS_FMT_SBGGR10_1X10,
> > +				     MEDIA_BUS_FMT_SGBRG10_1X10,
> > +				     MEDIA_BUS_FMT_SGRBG10_1X10,
> > +				     MEDIA_BUS_FMT_SRGGB10_1X10,
> > +				     MEDIA_BUS_FMT_SBGGR8_1X8,
> > +				     MEDIA_BUS_FMT_SGBRG8_1X8,
> > +				     MEDIA_BUS_FMT_SGRBG8_1X8,
> > +				     MEDIA_BUS_FMT_SRGGB8_1X8 },
> > +				   Size(cfg.width, cfg.height));
> > +	ret = sensor->setFormat(&format);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	ret = dphy_->setFormat(0, &format);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	ret = dphy_->getFormat(1, &format);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	ret = isp_->setFormat(0, &format);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	V4L2DeviceFormat outputFormat = {};
> > +	outputFormat.width = cfg.width;
> > +	outputFormat.height = cfg.height;
> > +	outputFormat.fourcc = V4L2_PIX_FMT_NV12;
> > +	outputFormat.planesCount = 2;
> > +
> > +	ret = video_->setFormat(&outputFormat);
> > +	if (ret)
> > +		return ret;
> > +
> > +	LOG(RkISP1, Debug)
> > +		<< "Configured pipeline for " << format.toString() << " -> "
> > +		<< outputFormat.toString();
> 
> Is this LOG needed, given that 3/11 already prints this in 
> Camera::configureStreams()?

The first part could be useful, as it tells which format was configured
on the sensor (before scaling), so I think it's worth keeping. I will
remove the second part (outputFormat) and make it more explicit that the
printed format relates to the sensor.
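
Something along these lines, keeping only the sensor part (the exact
message is still open):

	LOG(RkISP1, Debug)
		<< "Sensor configured with format " << format.toString();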

> > +
> > +	return 0;
> > +}
> > +
> > +int PipelineHandlerRkISP1::allocateBuffers(Camera *camera, Stream *stream)
> > +{
> > +	return video_->exportBuffers(&stream->bufferPool());
> > +}
> > +
> > +int PipelineHandlerRkISP1::freeBuffers(Camera *camera, Stream *stream)
> > +{
> > +	if (video_->releaseBuffers())
> > +		LOG(RkISP1, Error) << "Failed to release buffers";
> > +
> > +	return 0;
> > +}
> > +
> > +int PipelineHandlerRkISP1::start(Camera *camera)
> > +{
> > +	int ret;
> > +
> > +	ret = video_->streamOn();
> > +	if (ret)
> > +		LOG(RkISP1, Error)
> > +			<< "Failed to start camera " << camera->name();
> > +
> > +	activeCamera_ = camera;
> > +
> > +	return ret;
> > +}
> > +
> > +void PipelineHandlerRkISP1::stop(Camera *camera)
> > +{
> > +	int ret;
> > +
> > +	ret = video_->streamOff();
> > +	if (ret)
> > +		LOG(RkISP1, Warning)
> > +			<< "Failed to stop camera " << camera->name();
> > +
> > +	PipelineHandler::stop(camera);
> > +
> > +	activeCamera_ = nullptr;
> > +}
> > +
> > +int PipelineHandlerRkISP1::queueRequest(Camera *camera, Request *request)
> > +{
> > +	RkISP1CameraData *data = cameraData(camera);
> > +	Stream *stream = &data->stream_;
> > +
> > +	Buffer *buffer = request->findBuffer(stream);
> > +	if (!buffer) {
> > +		LOG(RkISP1, Error)
> > +			<< "Attempt to queue request with invalid stream";
> > +		return -ENOENT;
> > +	}
> 
> Should we move this check to Camera::queueRequest()?

The Camera class doesn't have access to data->stream_. I expect this to
be reworked with multiple streams support anyway, so I'd keep it as-is
for now.

> > +
> > +	int ret = video_->queueBuffer(buffer);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	PipelineHandler::queueRequest(camera, request);
> > +
> > +	return 0;
> > +}
> > +
> > +/* -----------------------------------------------------------------------------
> > + * Match and Setup
> > + */
> > +
> > +int PipelineHandlerRkISP1::initLinks()
> > +{
> > +	MediaLink *link;
> > +	int ret;
> > +
> > +	ret = media_->disableLinks();
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	link = media_->link("rockchip-sy-mipi-dphy", 1, "rkisp1-isp-subdev", 0);
> > +	if (!link)
> > +		return -ENODEV;
> > +
> > +	ret = link->setEnabled(true);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	link = media_->link("rkisp1-isp-subdev", 2, "rkisp1_mainpath", 0);
> > +	if (!link)
> > +		return -ENODEV;
> > +
> > +	ret = link->setEnabled(true);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	return 0;
> > +}
> > +
> > +int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
> > +{
> > +	int ret;
> > +
> > +	std::unique_ptr<RkISP1CameraData> data =
> > +		utils::make_unique<RkISP1CameraData>(this);
> > +
> > +	data->sensor_ = new CameraSensor(sensor);
> > +	ret = data->sensor_->init();
> > +	if (ret)
> > +		return ret;
> > +
> > +	std::set<Stream *> streams{ &data->stream_ };
> > +	std::shared_ptr<Camera> camera =
> > +		Camera::create(this, sensor->name(), streams);
> > +	registerCamera(std::move(camera), std::move(data));
> > +
> > +	return 0;
> > +}
> > +
> > +bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
> > +{
> > +	int ret;
> > +
> > +	DeviceMatch dm("rkisp1");
> > +	dm.add("rkisp1-isp-subdev");
> > +	dm.add("rkisp1_selfpath");
> > +	dm.add("rkisp1_mainpath");
> > +	dm.add("rkisp1-statistics");
> > +	dm.add("rkisp1-input-params");
> > +	dm.add("rockchip-sy-mipi-dphy");
> > +
> > +	media_ = enumerator->search(dm);
> > +	if (!media_)
> > +		return false;
> > +
> > +	media_->acquire();
> > +
> > +	ret = media_->open();
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	/* Create the V4L2 subdevices we will need. */
> > +	MediaEntity *dphy = media_->getEntityByName("rockchip-sy-mipi-dphy");
> > +	dphy_ = new V4L2Subdevice(dphy);
> > +	ret = dphy_->open();
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	MediaEntity *isp = media_->getEntityByName("rkisp1-isp-subdev");
> > +	isp_ = new V4L2Subdevice(isp);
> > +	ret = isp_->open();
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	/* Locate and open the capture video node. */
> > +	video_ = new V4L2Device(media_->getEntityByName("rkisp1_mainpath"));
> > +	if (video_->open())
> > +		return false;
> > +
> > +	video_->bufferReady.connect(this, &PipelineHandlerRkISP1::bufferReady);
> > +
> > +	/* Configure default links. */
> > +	ret = initLinks();
> > +	if (ret < 0) {
> > +		LOG(RkISP1, Error) << "Failed to setup links";
> > +		return false;
> > +	}
> > +
> > +	/*
> > +	 * Enumerate all sensors connected to the CSI-2 receiver and create one
> > +	 * camera instance for each of them.
> > +	 */
> > +	const MediaPad *pad = dphy->getPadByIndex(0);
> > +	if (!pad) {
> > +		ret = -EINVAL;
> > +		goto done;
> > +	}
> > +
> > +	for (MediaLink *link : pad->links()) {
> > +		/*
> > +		 * A sensor is expected to have a single pad and expose the
> > +		 * SENSOR function. This is overly restrictive; better logic
> > +		 * is needed to support sensors that expose multiple subdevs.
> > +		 *
> > +		 * \todo Create a helper to identify sensors
> > +		 */
> > +		MediaEntity *sensor = link->source()->entity();
> > +		if (!sensor)
> > +			continue;
> > +		if (sensor->pads().size() != 1)
> > +			continue;
> > +		if (sensor->function() != MEDIA_ENT_F_CAM_SENSOR)
> > +			continue;
> 
> Some of these checks overlap with CameraSensor::init(). Should all of 
> them be moved there? createCamera() will fail if CameraSensor::init() 
> fails anyhow.

Good point, I'll fix this. I will also remove the comment, as the
limitation will then be in the CameraSensor class, not here.
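
Roughly, the checks would move into CameraSensor::init() along these
lines (entity_ below stands for the sensor's MediaEntity, whatever the
member ends up being named):

	int CameraSensor::init()
	{
		/* Only simple, single-pad camera sensors are supported for now. */
		if (entity_->function() != MEDIA_ENT_F_CAM_SENSOR)
			return -EINVAL;

		if (entity_->pads().size() != 1)
			return -EINVAL;

		/* ... the rest of the existing initialization ... */

		return 0;
	}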

> > +
> > +		createCamera(sensor);
> > +	}
> > +
> > +done:
> > +	media_->close();
> > +
> > +	return ret == 0;
> > +}
> > +
> > +/* -----------------------------------------------------------------------------
> > + * Buffer Handling
> > + */
> > +
> > +void PipelineHandlerRkISP1::bufferReady(Buffer *buffer)
> > +{
> > +	ASSERT(activeCamera_);
> > +
> > +	RkISP1CameraData *data = cameraData(activeCamera_);
> > +	Request *request = data->queuedRequests_.front();
> > +
> > +	completeBuffer(activeCamera_, request, buffer);
> > +	completeRequest(activeCamera_, request);
> > +}
> > +
> > +REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1);
> > +
> > +} /* namespace libcamera */
Niklas Söderlund April 15, 2019, 11:29 p.m. UTC | #3
Hi Laurent,

On 2019-04-16 02:09:52 +0300, Laurent Pinchart wrote:
> > > +int PipelineHandlerRkISP1::queueRequest(Camera *camera, Request *request)
> > > +{
> > > +	RkISP1CameraData *data = cameraData(camera);
> > > +	Stream *stream = &data->stream_;
> > > +
> > > +	Buffer *buffer = request->findBuffer(stream);
> > > +	if (!buffer) {
> > > +		LOG(RkISP1, Error)
> > > +			<< "Attempt to queue request with invalid stream";
> > > +		return -ENOENT;
> > > +	}
> > 
> > Should we move this check to Camera::queueRequest()?
> 
> The Camera class doesn't have access to data->stream_. I expect this to
> be reworked with multiple streams support anyway, so I'd keep it as-is
> for now.

The Camera has access to something better: all the configured streams. With 
the pending patch [1] from Jacopo adding Request::buffers(), something 
like this in Camera::queueRequest() would address both single and 
multiple streams.

+       for (auto const &it : request->buffers()) {
+               Stream *stream = it.first;
+               if (activeStreams_.find(stream) == activeStreams_.end()) {
+                       LOG(Camera, Error)
+                               << "Attempt to queue request with invalid stream";
+                       return -EINVAL;
+               }
+       }
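
One side benefit of doing the check in Camera::queueRequest() is that
every pipeline handler would get the validation for free, with the error
reported consistently (-EINVAL here rather than the per-handler -ENOENT
above), assuming activeStreams_ is the set filled in by
Camera::configureStreams().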

1. [PATCH v5 6/7] libcamera: request: Expose the Stream to Buffers map
Laurent Pinchart April 16, 2019, 10:31 p.m. UTC | #4
Hi Niklas,

On Tue, Apr 16, 2019 at 01:29:29AM +0200, Niklas Söderlund wrote:
> On 2019-04-16 02:09:52 +0300, Laurent Pinchart wrote:
> >>> +int PipelineHandlerRkISP1::queueRequest(Camera *camera, Request *request)
> >>> +{
> >>> +	RkISP1CameraData *data = cameraData(camera);
> >>> +	Stream *stream = &data->stream_;
> >>> +
> >>> +	Buffer *buffer = request->findBuffer(stream);
> >>> +	if (!buffer) {
> >>> +		LOG(RkISP1, Error)
> >>> +			<< "Attempt to queue request with invalid stream";
> >>> +		return -ENOENT;
> >>> +	}
> >> 
> >> Should we move this check to Camera::queueRequest()?
> > 
> > The Camera class doesn't have access to data->stream_. I expect this to
> > be reworked with multiple streams support anyway, so I'd keep it as-is
> > for now.
> 
> The Camera has access to something better: all the configured streams. With 
> the pending patch [1] from Jacopo adding Request::buffers(), something 
> like this in Camera::queueRequest() would address both single and 
> multiple streams.
> 
> +       for (auto const &it : request->buffers()) {
> +               Stream *stream = it.first;
> +               if (activeStreams_.find(stream) == activeStreams_.end()) {
> +                       LOG(Camera, Error)
> +                               << "Attempt to queue request with invalid stream";
> +                       return -EINVAL;
> +               }
> +       }
> 
> 1. [PATCH v5 6/7] libcamera: request: Expose the Stream to Buffers map

To avoid adding a dependency between the two series, let's do this on
top once they are merged. Would you like to send a patch? :-)

Patch

diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
index 40bb26405b88..0d466225a72e 100644
--- a/src/libcamera/pipeline/meson.build
+++ b/src/libcamera/pipeline/meson.build
@@ -4,3 +4,4 @@  libcamera_sources += files([
 ])
 
 subdir('ipu3')
+subdir('rkisp1')
diff --git a/src/libcamera/pipeline/rkisp1/meson.build b/src/libcamera/pipeline/rkisp1/meson.build
new file mode 100644
index 000000000000..f1cc4046b5d0
--- /dev/null
+++ b/src/libcamera/pipeline/rkisp1/meson.build
@@ -0,0 +1,3 @@ 
+libcamera_sources += files([
+    'rkisp1.cpp',
+])
diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
new file mode 100644
index 000000000000..887c879d9024
--- /dev/null
+++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
@@ -0,0 +1,448 @@ 
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2019, Google Inc.
+ *
+ * rkisp1.cpp - Pipeline handler for Rockchip ISP1
+ */
+
+#include <iomanip>
+#include <memory>
+#include <vector>
+
+#include <linux/media-bus-format.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/request.h>
+#include <libcamera/stream.h>
+
+#include "camera_sensor.h"
+#include "device_enumerator.h"
+#include "log.h"
+#include "media_device.h"
+#include "pipeline_handler.h"
+#include "utils.h"
+#include "v4l2_device.h"
+#include "v4l2_subdevice.h"
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(RkISP1)
+
+class PipelineHandlerRkISP1 : public PipelineHandler
+{
+public:
+	PipelineHandlerRkISP1(CameraManager *manager);
+	~PipelineHandlerRkISP1();
+
+	CameraConfiguration streamConfiguration(Camera *camera,
+		const std::vector<StreamUsage> &usages) override;
+	int configureStreams(Camera *camera,
+			     const CameraConfiguration &config) override;
+
+	int allocateBuffers(Camera *camera, Stream *stream) override;
+	int freeBuffers(Camera *camera, Stream *stream) override;
+
+	int start(Camera *camera) override;
+	void stop(Camera *camera) override;
+
+	int queueRequest(Camera *camera, Request *request) override;
+
+	bool match(DeviceEnumerator *enumerator) override;
+
+private:
+	class RkISP1CameraData : public CameraData
+	{
+	public:
+		RkISP1CameraData(PipelineHandler *pipe)
+			: CameraData(pipe), sensor_(nullptr)
+		{
+		}
+
+		~RkISP1CameraData()
+		{
+			delete sensor_;
+		}
+
+		Stream stream_;
+		CameraSensor *sensor_;
+	};
+
+	static constexpr unsigned int RKISP1_BUFFER_COUNT = 4;
+
+	RkISP1CameraData *cameraData(const Camera *camera)
+	{
+		return static_cast<RkISP1CameraData *>(
+			PipelineHandler::cameraData(camera));
+	}
+
+	int initLinks();
+	int createCamera(MediaEntity *sensor);
+	void bufferReady(Buffer *buffer);
+
+	std::shared_ptr<MediaDevice> media_;
+	V4L2Subdevice *dphy_;
+	V4L2Subdevice *isp_;
+	V4L2Device *video_;
+
+	Camera *activeCamera_;
+};
+
+PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
+	: PipelineHandler(manager), dphy_(nullptr), isp_(nullptr),
+	  video_(nullptr)
+{
+}
+
+PipelineHandlerRkISP1::~PipelineHandlerRkISP1()
+{
+	delete video_;
+	delete isp_;
+	delete dphy_;
+
+	if (media_)
+		media_->release();
+}
+
+/* -----------------------------------------------------------------------------
+ * Pipeline Operations
+ */
+
+CameraConfiguration PipelineHandlerRkISP1::streamConfiguration(Camera *camera,
+	const std::vector<StreamUsage> &usages)
+{
+	RkISP1CameraData *data = cameraData(camera);
+	CameraConfiguration configs;
+	StreamConfiguration config{};
+
+	const Size &resolution = data->sensor_->resolution();
+	config.width = resolution.width;
+	config.height = resolution.height;
+	config.pixelFormat = V4L2_PIX_FMT_NV12;
+	config.bufferCount = RKISP1_BUFFER_COUNT;
+
+	configs[&data->stream_] = config;
+
+	LOG(RkISP1, Debug)
+		<< "Stream format set to " << config.width << "x"
+		<< config.height << "-0x" << std::hex << std::setfill('0')
+		<< std::setw(8) << config.pixelFormat;
+
+	return configs;
+}
+
+int PipelineHandlerRkISP1::configureStreams(Camera *camera,
+					    const CameraConfiguration &config)
+{
+	RkISP1CameraData *data = cameraData(camera);
+	const StreamConfiguration &cfg = config[&data->stream_];
+	CameraSensor *sensor = data->sensor_;
+	int ret;
+
+	/* Verify the configuration. */
+	const Size &resolution = sensor->resolution();
+	if (cfg.width > resolution.width ||
+	    cfg.height > resolution.height) {
+		LOG(RkISP1, Error)
+			<< "Invalid stream size: larger than sensor resolution";
+		return -EINVAL;
+	}
+
+	/*
+	 * Configure the sensor links: enable the link corresponding to this
+	 * camera and disable all the other sensor links.
+	 */
+	const MediaPad *pad = dphy_->entity()->getPadByIndex(0);
+
+	ret = media_->open();
+	if (ret < 0)
+		return ret;
+
+	for (MediaLink *link : pad->links()) {
+		bool enable = link->source()->entity() == sensor->entity();
+
+		if (!!(link->flags() & MEDIA_LNK_FL_ENABLED) == enable)
+			continue;
+
+		LOG(RkISP1, Debug)
+			<< (enable ? "Enabling" : "Disabling")
+			<< " link from sensor '"
+			<< link->source()->entity()->name()
+			<< "' to CSI-2 receiver";
+
+		ret = link->setEnabled(enable);
+		if (ret < 0)
+			break;
+	}
+
+	media_->close();
+
+	if (ret < 0)
+		return ret;
+
+	/*
+	 * Configure the format on the sensor output and propagate it through
+	 * the pipeline.
+	 */
+	V4L2SubdeviceFormat format;
+	format = sensor->getFormat({ MEDIA_BUS_FMT_SBGGR12_1X12,
+				     MEDIA_BUS_FMT_SGBRG12_1X12,
+				     MEDIA_BUS_FMT_SGRBG12_1X12,
+				     MEDIA_BUS_FMT_SRGGB12_1X12,
+				     MEDIA_BUS_FMT_SBGGR10_1X10,
+				     MEDIA_BUS_FMT_SGBRG10_1X10,
+				     MEDIA_BUS_FMT_SGRBG10_1X10,
+				     MEDIA_BUS_FMT_SRGGB10_1X10,
+				     MEDIA_BUS_FMT_SBGGR8_1X8,
+				     MEDIA_BUS_FMT_SGBRG8_1X8,
+				     MEDIA_BUS_FMT_SGRBG8_1X8,
+				     MEDIA_BUS_FMT_SRGGB8_1X8 },
+				   Size(cfg.width, cfg.height));
+	ret = sensor->setFormat(&format);
+	if (ret < 0)
+		return ret;
+
+	ret = dphy_->setFormat(0, &format);
+	if (ret < 0)
+		return ret;
+
+	ret = dphy_->getFormat(1, &format);
+	if (ret < 0)
+		return ret;
+
+	ret = isp_->setFormat(0, &format);
+	if (ret < 0)
+		return ret;
+
+	V4L2DeviceFormat outputFormat = {};
+	outputFormat.width = cfg.width;
+	outputFormat.height = cfg.height;
+	outputFormat.fourcc = V4L2_PIX_FMT_NV12;
+	outputFormat.planesCount = 2;
+
+	ret = video_->setFormat(&outputFormat);
+	if (ret)
+		return ret;
+
+	LOG(RkISP1, Debug)
+		<< "Configured pipeline for " << format.toString() << " -> "
+		<< outputFormat.toString();
+
+	return 0;
+}
+
+int PipelineHandlerRkISP1::allocateBuffers(Camera *camera, Stream *stream)
+{
+	return video_->exportBuffers(&stream->bufferPool());
+}
+
+int PipelineHandlerRkISP1::freeBuffers(Camera *camera, Stream *stream)
+{
+	if (video_->releaseBuffers())
+		LOG(RkISP1, Error) << "Failed to release buffers";
+
+	return 0;
+}
+
+int PipelineHandlerRkISP1::start(Camera *camera)
+{
+	int ret;
+
+	ret = video_->streamOn();
+	if (ret)
+		LOG(RkISP1, Error)
+			<< "Failed to start camera " << camera->name();
+
+	activeCamera_ = camera;
+
+	return ret;
+}
+
+void PipelineHandlerRkISP1::stop(Camera *camera)
+{
+	int ret;
+
+	ret = video_->streamOff();
+	if (ret)
+		LOG(RkISP1, Warning)
+			<< "Failed to stop camera " << camera->name();
+
+	PipelineHandler::stop(camera);
+
+	activeCamera_ = nullptr;
+}
+
+int PipelineHandlerRkISP1::queueRequest(Camera *camera, Request *request)
+{
+	RkISP1CameraData *data = cameraData(camera);
+	Stream *stream = &data->stream_;
+
+	Buffer *buffer = request->findBuffer(stream);
+	if (!buffer) {
+		LOG(RkISP1, Error)
+			<< "Attempt to queue request with invalid stream";
+		return -ENOENT;
+	}
+
+	int ret = video_->queueBuffer(buffer);
+	if (ret < 0)
+		return ret;
+
+	PipelineHandler::queueRequest(camera, request);
+
+	return 0;
+}
+
+/* -----------------------------------------------------------------------------
+ * Match and Setup
+ */
+
+int PipelineHandlerRkISP1::initLinks()
+{
+	MediaLink *link;
+	int ret;
+
+	ret = media_->disableLinks();
+	if (ret < 0)
+		return ret;
+
+	link = media_->link("rockchip-sy-mipi-dphy", 1, "rkisp1-isp-subdev", 0);
+	if (!link)
+		return -ENODEV;
+
+	ret = link->setEnabled(true);
+	if (ret < 0)
+		return ret;
+
+	link = media_->link("rkisp1-isp-subdev", 2, "rkisp1_mainpath", 0);
+	if (!link)
+		return -ENODEV;
+
+	ret = link->setEnabled(true);
+	if (ret < 0)
+		return ret;
+
+	return 0;
+}
+
+int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
+{
+	int ret;
+
+	std::unique_ptr<RkISP1CameraData> data =
+		utils::make_unique<RkISP1CameraData>(this);
+
+	data->sensor_ = new CameraSensor(sensor);
+	ret = data->sensor_->init();
+	if (ret)
+		return ret;
+
+	std::set<Stream *> streams{ &data->stream_ };
+	std::shared_ptr<Camera> camera =
+		Camera::create(this, sensor->name(), streams);
+	registerCamera(std::move(camera), std::move(data));
+
+	return 0;
+}
+
+bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
+{
+	int ret;
+
+	DeviceMatch dm("rkisp1");
+	dm.add("rkisp1-isp-subdev");
+	dm.add("rkisp1_selfpath");
+	dm.add("rkisp1_mainpath");
+	dm.add("rkisp1-statistics");
+	dm.add("rkisp1-input-params");
+	dm.add("rockchip-sy-mipi-dphy");
+
+	media_ = enumerator->search(dm);
+	if (!media_)
+		return false;
+
+	media_->acquire();
+
+	ret = media_->open();
+	if (ret < 0)
+		return ret;
+
+	/* Create the V4L2 subdevices we will need. */
+	MediaEntity *dphy = media_->getEntityByName("rockchip-sy-mipi-dphy");
+	dphy_ = new V4L2Subdevice(dphy);
+	ret = dphy_->open();
+	if (ret < 0)
+		return ret;
+
+	MediaEntity *isp = media_->getEntityByName("rkisp1-isp-subdev");
+	isp_ = new V4L2Subdevice(isp);
+	ret = isp_->open();
+	if (ret < 0)
+		return ret;
+
+	/* Locate and open the capture video node. */
+	video_ = new V4L2Device(media_->getEntityByName("rkisp1_mainpath"));
+	if (video_->open())
+		return false;
+
+	video_->bufferReady.connect(this, &PipelineHandlerRkISP1::bufferReady);
+
+	/* Configure default links. */
+	ret = initLinks();
+	if (ret < 0) {
+		LOG(RkISP1, Error) << "Failed to setup links";
+		return false;
+	}
+
+	/*
+	 * Enumerate all sensors connected to the CSI-2 receiver and create one
+	 * camera instance for each of them.
+	 */
+	const MediaPad *pad = dphy->getPadByIndex(0);
+	if (!pad) {
+		ret = -EINVAL;
+		goto done;
+	}
+
+	for (MediaLink *link : pad->links()) {
+		/*
+		 * A sensor is expected to have a single pad and expose the
+		 * SENSOR function. This is overly restrictive; better logic
+		 * is needed to support sensors that expose multiple subdevs.
+		 *
+		 * \todo Create a helper to identify sensors
+		 */
+		MediaEntity *sensor = link->source()->entity();
+		if (!sensor)
+			continue;
+		if (sensor->pads().size() != 1)
+			continue;
+		if (sensor->function() != MEDIA_ENT_F_CAM_SENSOR)
+			continue;
+
+		createCamera(sensor);
+	}
+
+done:
+	media_->close();
+
+	return ret == 0;
+}
+
+/* -----------------------------------------------------------------------------
+ * Buffer Handling
+ */
+
+void PipelineHandlerRkISP1::bufferReady(Buffer *buffer)
+{
+	ASSERT(activeCamera_);
+
+	RkISP1CameraData *data = cameraData(activeCamera_);
+	Request *request = data->queuedRequests_.front();
+
+	completeBuffer(activeCamera_, request, buffer);
+	completeRequest(activeCamera_, request);
+}
+
+REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1);
+
+} /* namespace libcamera */