[libcamera-devel,v2,13/13] libcamera: pipeline: Add RKISP1 pipeline

Message ID 20190416220839.1577-14-laurent.pinchart@ideasonboard.com
State Superseded
Series
  • Rockchip ISP pipeline handler

Commit Message

Laurent Pinchart April 16, 2019, 10:08 p.m. UTC
The pipeline handler for the Rockchip ISP creates one camera instance
per detected raw Bayer CSI-2 sensor. Parallel sensors and YUV sensors
are not supported yet.

As the ISP has a single CSI-2 receiver, only one camera can be used at a
time. Mutual exclusion isn't implemented yet.

Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
---
Changes since v1:

- Move sensor entity sanity checks to CameraSensor class
- Remove unneeded LOG() message
---
 src/libcamera/pipeline/meson.build        |   1 +
 src/libcamera/pipeline/rkisp1/meson.build |   3 +
 src/libcamera/pipeline/rkisp1/rkisp1.cpp  | 431 ++++++++++++++++++++++
 3 files changed, 435 insertions(+)
 create mode 100644 src/libcamera/pipeline/rkisp1/meson.build
 create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1.cpp

Comments

Jacopo Mondi April 17, 2019, 12:32 p.m. UTC | #1
Hi Laurent,

On Wed, Apr 17, 2019 at 01:08:39AM +0300, Laurent Pinchart wrote:
> The pipeline handler for the Rockchip ISP creates one camera instance
> per detected raw Bayer CSI-2 sensor. Parallel sensors and YUV sensors
> are not supported yet.
>
> As the ISP has a single CSI-2 receiver, only one camera can be used at a
> time. Mutual exclusion isn't implemented yet.
>
> Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
> ---
> Changes since v1:
>
> - Move sensor entity sanity checks to CameraSensor class
> - Remove unneeded LOG() message
> ---
>  src/libcamera/pipeline/meson.build        |   1 +
>  src/libcamera/pipeline/rkisp1/meson.build |   3 +
>  src/libcamera/pipeline/rkisp1/rkisp1.cpp  | 431 ++++++++++++++++++++++
>  3 files changed, 435 insertions(+)
>  create mode 100644 src/libcamera/pipeline/rkisp1/meson.build
>  create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1.cpp
>
> diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
> index 40bb26405b88..0d466225a72e 100644
> --- a/src/libcamera/pipeline/meson.build
> +++ b/src/libcamera/pipeline/meson.build
> @@ -4,3 +4,4 @@ libcamera_sources += files([
>  ])
>
>  subdir('ipu3')
> +subdir('rkisp1')
> diff --git a/src/libcamera/pipeline/rkisp1/meson.build b/src/libcamera/pipeline/rkisp1/meson.build
> new file mode 100644
> index 000000000000..f1cc4046b5d0
> --- /dev/null
> +++ b/src/libcamera/pipeline/rkisp1/meson.build
> @@ -0,0 +1,3 @@
> +libcamera_sources += files([
> +    'rkisp1.cpp',
> +])
> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> new file mode 100644
> index 000000000000..d44b546fa008
> --- /dev/null
> +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> @@ -0,0 +1,431 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2019, Google Inc.
> + *
> + * rkisp1.cpp - Pipeline handler for Rockchip ISP1
> + */
> +
> +#include <iomanip>
> +#include <memory>
> +#include <vector>
> +
> +#include <linux/media-bus-format.h>
> +
> +#include <libcamera/camera.h>
> +#include <libcamera/request.h>
> +#include <libcamera/stream.h>
> +
> +#include "camera_sensor.h"
> +#include "device_enumerator.h"
> +#include "log.h"
> +#include "media_device.h"
> +#include "pipeline_handler.h"
> +#include "utils.h"
> +#include "v4l2_device.h"
> +#include "v4l2_subdevice.h"
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(RkISP1)
> +
> +class PipelineHandlerRkISP1 : public PipelineHandler
> +{
> +public:
> +	PipelineHandlerRkISP1(CameraManager *manager);
> +	~PipelineHandlerRkISP1();
> +
> +	CameraConfiguration streamConfiguration(Camera *camera,
> +		const std::vector<StreamUsage> &usages) override;
> +	int configureStreams(Camera *camera,
> +			     const CameraConfiguration &config) override;
> +
> +	int allocateBuffers(Camera *camera, Stream *stream) override;
> +	int freeBuffers(Camera *camera, Stream *stream) override;
> +
> +	int start(Camera *camera) override;
> +	void stop(Camera *camera) override;
> +
> +	int queueRequest(Camera *camera, Request *request) override;
> +
> +	bool match(DeviceEnumerator *enumerator) override;
> +
> +private:
> +	class RkISP1CameraData : public CameraData
> +	{
> +	public:
> +		RkISP1CameraData(PipelineHandler *pipe)
> +			: CameraData(pipe), sensor_(nullptr)
> +		{
> +		}
> +
> +		~RkISP1CameraData()
> +		{
> +			delete sensor_;
> +		}
> +
> +		Stream stream_;
> +		CameraSensor *sensor_;
> +	};
> +
> +	static constexpr unsigned int RKISP1_BUFFER_COUNT = 4;
> +
> +	RkISP1CameraData *cameraData(const Camera *camera)
> +	{
> +		return static_cast<RkISP1CameraData *>(
> +			PipelineHandler::cameraData(camera));
> +	}
> +
> +	int initLinks();
> +	int createCamera(MediaEntity *sensor);
> +	void bufferReady(Buffer *buffer);
> +
> +	std::shared_ptr<MediaDevice> media_;
> +	V4L2Subdevice *dphy_;
> +	V4L2Subdevice *isp_;
> +	V4L2Device *video_;
> +
> +	Camera *activeCamera_;
> +};
> +
> +PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
> +	: PipelineHandler(manager), dphy_(nullptr), isp_(nullptr),
> +	  video_(nullptr)
> +{
> +}
> +
> +PipelineHandlerRkISP1::~PipelineHandlerRkISP1()
> +{
> +	delete video_;
> +	delete isp_;
> +	delete dphy_;
> +
> +	if (media_)
> +		media_->release();
> +}
> +
> +/* -----------------------------------------------------------------------------
> + * Pipeline Operations
> + */
> +
> +CameraConfiguration PipelineHandlerRkISP1::streamConfiguration(Camera *camera,
> +	const std::vector<StreamUsage> &usages)
> +{
> +	RkISP1CameraData *data = cameraData(camera);
> +	CameraConfiguration configs;
> +	StreamConfiguration config{};
> +
> +	const Size &resolution = data->sensor_->resolution();
> +	config.width = resolution.width;
> +	config.height = resolution.height;
> +	config.pixelFormat = V4L2_PIX_FMT_NV12;
> +	config.bufferCount = RKISP1_BUFFER_COUNT;
> +
> +	configs[&data->stream_] = config;
> +
> +	LOG(RkISP1, Debug)
> +		<< "Stream format set to " << config.width << "x"
> +		<< config.height << "-0x" << std::hex << std::setfill('0')
> +		<< std::setw(8) << config.pixelFormat;
> +
> +	return configs;
> +}
> +
> +int PipelineHandlerRkISP1::configureStreams(Camera *camera,
> +					    const CameraConfiguration &config)
> +{
> +	RkISP1CameraData *data = cameraData(camera);
> +	const StreamConfiguration &cfg = config[&data->stream_];
> +	CameraSensor *sensor = data->sensor_;
> +	int ret;
> +
> +	/* Verify the configuration. */
> +	const Size &resolution = sensor->resolution();
> +	if (cfg.width > resolution.width ||
> +	    cfg.height > resolution.height) {
> +		LOG(RkISP1, Error)
> +			<< "Invalid stream size: larger than sensor resolution";
> +		return -EINVAL;
> +	}
> +
> +	/*
> +	 * Configure the sensor links: enable the link corresponding to this
> +	 * camera and disable all the other sensor links.
> +	 */
> +	const MediaPad *pad = dphy_->entity()->getPadByIndex(0);
> +
> +	ret = media_->open();
> +	if (ret < 0)
> +		return ret;
> +
> +	for (MediaLink *link : pad->links()) {
> +		bool enable = link->source()->entity() == sensor->entity();
> +
> +		if (!!(link->flags() & MEDIA_LNK_FL_ENABLED) == enable)
> +			continue;
> +
> +		LOG(RkISP1, Debug)
> +			<< (enable ? "Enabling" : "Disabling")
> +			<< " link from sensor '"
> +			<< link->source()->entity()->name()
> +			<< "' to CSI-2 receiver";
> +
> +		ret = link->setEnabled(enable);
> +		if (ret < 0)
> +			break;
> +	}
> +
> +	media_->close();
> +
> +	if (ret < 0)
> +		return ret;
> +
> +	/*
> +	 * Configure the format on the sensor output and propagate it through
> +	 * the pipeline.
> +	 */
> +	V4L2SubdeviceFormat format;
> +	format = sensor->getFormat({ MEDIA_BUS_FMT_SBGGR12_1X12,
> +				     MEDIA_BUS_FMT_SGBRG12_1X12,
> +				     MEDIA_BUS_FMT_SGRBG12_1X12,
> +				     MEDIA_BUS_FMT_SRGGB12_1X12,
> +				     MEDIA_BUS_FMT_SBGGR10_1X10,
> +				     MEDIA_BUS_FMT_SGBRG10_1X10,
> +				     MEDIA_BUS_FMT_SGRBG10_1X10,
> +				     MEDIA_BUS_FMT_SRGGB10_1X10,
> +				     MEDIA_BUS_FMT_SBGGR8_1X8,
> +				     MEDIA_BUS_FMT_SGBRG8_1X8,
> +				     MEDIA_BUS_FMT_SGRBG8_1X8,
> +				     MEDIA_BUS_FMT_SRGGB8_1X8 },
> +				   Size(cfg.width, cfg.height));

nice!

> +
> +	LOG(RkISP1, Debug) << "Configuring sensor for " << format.toString();
> +
> +	ret = sensor->setFormat(&format);
> +	if (ret < 0)
> +		return ret;
> +
> +	ret = dphy_->setFormat(0, &format);
> +	if (ret < 0)
> +		return ret;
> +
> +	ret = dphy_->getFormat(1, &format);
> +	if (ret < 0)
> +		return ret;

The format is internally propagated from the dphy_[0] sink to the
dphy_[1] source pad, right? Do you need to get it from [1]? Does it
get modified when propagated from sink to source?

> +
> +	ret = isp_->setFormat(0, &format);
> +	if (ret < 0)
> +		return ret;

Maybe add a printout of the format that gets configured on each subdevice?
I know you think it's too verbose, but it helps (possibly more while
developing than when actually using this).

> +
> +	V4L2DeviceFormat outputFormat = {};
> +	outputFormat.width = cfg.width;
> +	outputFormat.height = cfg.height;
> +	outputFormat.fourcc = V4L2_PIX_FMT_NV12;
> +	outputFormat.planesCount = 2;
> +
> +	ret = video_->setFormat(&outputFormat);
> +	if (ret)
> +		return ret;

I would at least print this one; setFormat() could modify the applied
format...

> +
> +	return 0;
> +}
> +
> +int PipelineHandlerRkISP1::allocateBuffers(Camera *camera, Stream *stream)
> +{
> +	return video_->exportBuffers(&stream->bufferPool());
> +}
> +
> +int PipelineHandlerRkISP1::freeBuffers(Camera *camera, Stream *stream)
> +{
> +	if (video_->releaseBuffers())
> +		LOG(RkISP1, Error) << "Failed to release buffers";
> +
> +	return 0;
> +}
> +
> +int PipelineHandlerRkISP1::start(Camera *camera)
> +{
> +	int ret;
> +
> +	ret = video_->streamOn();
> +	if (ret)
> +		LOG(RkISP1, Error)
> +			<< "Failed to start camera " << camera->name();
> +
> +	activeCamera_ = camera;
> +
> +	return ret;
> +}
> +
> +void PipelineHandlerRkISP1::stop(Camera *camera)
> +{
> +	int ret;
> +
> +	ret = video_->streamOff();
> +	if (ret)
> +		LOG(RkISP1, Warning)
> +			<< "Failed to stop camera " << camera->name();
> +
> +	PipelineHandler::stop(camera);
> +
> +	activeCamera_ = nullptr;
> +}
> +
> +int PipelineHandlerRkISP1::queueRequest(Camera *camera, Request *request)
> +{
> +	RkISP1CameraData *data = cameraData(camera);
> +	Stream *stream = &data->stream_;
> +
> +	Buffer *buffer = request->findBuffer(stream);
> +	if (!buffer) {
> +		LOG(RkISP1, Error)
> +			<< "Attempt to queue request with invalid stream";
> +		return -ENOENT;
> +	}
> +
> +	int ret = video_->queueBuffer(buffer);
> +	if (ret < 0)
> +		return ret;
> +
> +	PipelineHandler::queueRequest(camera, request);
> +
> +	return 0;
> +}
> +
> +/* -----------------------------------------------------------------------------
> + * Match and Setup
> + */
> +
> +int PipelineHandlerRkISP1::initLinks()
> +{
> +	MediaLink *link;
> +	int ret;
> +
> +	ret = media_->disableLinks();
> +	if (ret < 0)
> +		return ret;
> +
> +	link = media_->link("rockchip-sy-mipi-dphy", 1, "rkisp1-isp-subdev", 0);
> +	if (!link)
> +		return -ENODEV;
> +
> +	ret = link->setEnabled(true);
> +	if (ret < 0)
> +		return ret;
> +
> +	link = media_->link("rkisp1-isp-subdev", 2, "rkisp1_mainpath", 0);
> +	if (!link)
> +		return -ENODEV;
> +
> +	ret = link->setEnabled(true);
> +	if (ret < 0)
> +		return ret;
> +

This calls for a helper in Links based on ImgUDevice::linkSetup().
Not strictly related to this patch, though.
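
For reference, a hypothetical helper along those lines (name, placement and
signature are illustrative, not existing API) might look like:

	/* Hypothetical helper: look up a link by entity names and toggle it. */
	int MediaDevice::setupLink(const std::string &source, unsigned int sourcePad,
				   const std::string &sink, unsigned int sinkPad,
				   bool enable)
	{
		MediaLink *mediaLink = link(source, sourcePad, sink, sinkPad);
		if (!mediaLink)
			return -ENODEV;

		return mediaLink->setEnabled(enable);
	}

initLinks() could then chain two setupLink() calls instead of open-coding
the lookup and enable steps.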

> +	return 0;
> +}
> +
> +int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
> +{
> +	int ret;
> +
> +	std::unique_ptr<RkISP1CameraData> data =
> +		utils::make_unique<RkISP1CameraData>(this);
> +
> +	data->sensor_ = new CameraSensor(sensor);
> +	ret = data->sensor_->init();
> +	if (ret)
> +		return ret;
> +
> +	std::set<Stream *> streams{ &data->stream_ };
> +	std::shared_ptr<Camera> camera =
> +		Camera::create(this, sensor->name(), streams);

Won't identical sensors generate identical camera names?

> +	registerCamera(std::move(camera), std::move(data));
> +
> +	return 0;
> +}
> +
> +bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
> +{
> +	int ret;
> +
> +	DeviceMatch dm("rkisp1");
> +	dm.add("rkisp1-isp-subdev");
> +	dm.add("rkisp1_selfpath");
> +	dm.add("rkisp1_mainpath");
> +	dm.add("rkisp1-statistics");
> +	dm.add("rkisp1-input-params");
> +	dm.add("rockchip-sy-mipi-dphy");
> +
> +	media_ = enumerator->search(dm);
> +	if (!media_)
> +		return false;
> +
> +	media_->acquire();
> +
> +	ret = media_->open();

The media device should be closed in this function's error paths.
The class destructor releases it, but does not close it.

> +	if (ret < 0)
> +		return ret;
> +
> +	/* Create the V4L2 subdevices we will need. */
> +	MediaEntity *dphy = media_->getEntityByName("rockchip-sy-mipi-dphy");
> +	dphy_ = new V4L2Subdevice(dphy);

You could use V4L2Subdevice::fromEntityName() here and below for isp_.
As you need to access the dphy entity below, you can use your newly
introduced V4L2Subdevice::entity() method to access it.
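
A rough sketch of that change, assuming factories of the form
fromEntityName(media, name) that return a new instance (the exact
signatures are an assumption here):

	/* Assumed factory helpers, as suggested above. */
	dphy_ = V4L2Subdevice::fromEntityName(media_.get(), "rockchip-sy-mipi-dphy");
	isp_ = V4L2Subdevice::fromEntityName(media_.get(), "rkisp1-isp-subdev");
	video_ = V4L2Device::fromEntityName(media_.get(), "rkisp1_mainpath");

The dphy entity would then stay reachable through dphy_->entity() for the
pad lookup further down.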

> +	ret = dphy_->open();
> +	if (ret < 0)
> +		return ret;
> +
> +	MediaEntity *isp = media_->getEntityByName("rkisp1-isp-subdev");
> +	isp_ = new V4L2Subdevice(isp);
> +	ret = isp_->open();
> +	if (ret < 0)
> +		return ret;
> +
> +	/* Locate and open the capture video node. */
> +	video_ = new V4L2Device(media_->getEntityByName("rkisp1_mainpath"));

And here V4L2Device::fromEntityName()

> +	if (video_->open())
> +		return false;
> +
> +	video_->bufferReady.connect(this, &PipelineHandlerRkISP1::bufferReady);
> +
> +	/* Configure default links. */
> +	ret = initLinks();
> +	if (ret < 0) {
> +		LOG(RkISP1, Error) << "Failed to setup links";
> +		return false;
> +	}
> +
> +	/*
> +	 * Enumerate all sensors connected to the CSI-2 receiver and create one
> +	 * camera instance for each of them.
> +	 */
> +	const MediaPad *pad = dphy->getPadByIndex(0);
> +	if (!pad) {
> +		ret = -EINVAL;
> +		goto done;
> +	}
> +
> +	for (MediaLink *link : pad->links())
> +		createCamera(link->source()->entity());
> +
> +done:
> +	media_->close();

Ah, you close it here. You might want to jump here instead of
returning false in the error paths above.
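
A rough shape for that, eliding the unchanged steps (pad would have to be
declared, uninitialised, before the first goto so the jumps don't cross
its initialisation):

	ret = dphy_->open();
	if (ret < 0)
		goto done;

	/* ... open isp_ and video_, connect bufferReady, run initLinks(),
	 * each jumping to done on failure ... */

	pad = dphy_->entity()->getPadByIndex(0);
	if (!pad) {
		ret = -EINVAL;
		goto done;
	}

	for (MediaLink *link : pad->links())
		createCamera(link->source()->entity());

done:
	media_->close();

	return ret == 0;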

> +
> +	return ret == 0;
> +}
> +
> +/* -----------------------------------------------------------------------------
> + * Buffer Handling
> + */
> +
> +void PipelineHandlerRkISP1::bufferReady(Buffer *buffer)
> +{
> +	ASSERT(activeCamera_);

Is receiving a bufferReady signal after the stop() operation a fatal
error? If capture was started, cancelled buffers might still complete
after S_STREAM(0) arrives?

Nice to see this progressing, and hope to be able to test it soon!
Thanks
  j

> +
> +	RkISP1CameraData *data = cameraData(activeCamera_);
> +	Request *request = data->queuedRequests_.front();
> +
> +	completeBuffer(activeCamera_, request, buffer);
> +	completeRequest(activeCamera_, request);
> +}
> +
> +REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1);
> +
> +} /* namespace libcamera */
Niklas Söderlund April 18, 2019, 1:02 a.m. UTC | #2
Hi Laurent,

Thanks for your work.

On 2019-04-17 01:08:39 +0300, Laurent Pinchart wrote:
> The pipeline handler for the Rockchip ISP creates one camera instance
> per detected raw Bayer CSI-2 sensor. Parallel sensors and YUV sensors
> are not supported yet.
> 
> As the ISP has a single CSI-2 receiver, only one camera can be used at a
> time. Mutual exclusion isn't implemented yet.
> 
> Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>

Reviewed-by: Niklas Söderlund <niklas.soderlund@ragnatech.se>
Tested-by: Niklas Söderlund <niklas.soderlund@ragnatech.se>

Laurent Pinchart April 18, 2019, 1:53 p.m. UTC | #3
Hi Jacopo,

On Wed, Apr 17, 2019 at 02:32:26PM +0200, Jacopo Mondi wrote:
> On Wed, Apr 17, 2019 at 01:08:39AM +0300, Laurent Pinchart wrote:
> > The pipeline handler for the Rockchip ISP creates one camera instance
> > per detected raw Bayer CSI-2 sensor. Parallel sensors and YUV sensors
> > are not supported yet.
> >
> > As the ISP has a single CSI-2 receiver, only one camera can be used at a
> > time. Mutual exclusion isn't implemented yet.
> >
> > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
> > ---
> > Changes since v1:
> >
> > - Move sensor entity sanity checks to CameraSensor class
> > - Remove unneeded LOG() message
> > ---
> >  src/libcamera/pipeline/meson.build        |   1 +
> >  src/libcamera/pipeline/rkisp1/meson.build |   3 +
> >  src/libcamera/pipeline/rkisp1/rkisp1.cpp  | 431 ++++++++++++++++++++++
> >  3 files changed, 435 insertions(+)
> >  create mode 100644 src/libcamera/pipeline/rkisp1/meson.build
> >  create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1.cpp
> >
> > diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
> > index 40bb26405b88..0d466225a72e 100644
> > --- a/src/libcamera/pipeline/meson.build
> > +++ b/src/libcamera/pipeline/meson.build
> > @@ -4,3 +4,4 @@ libcamera_sources += files([
> >  ])
> >
> >  subdir('ipu3')
> > +subdir('rkisp1')
> > diff --git a/src/libcamera/pipeline/rkisp1/meson.build b/src/libcamera/pipeline/rkisp1/meson.build
> > new file mode 100644
> > index 000000000000..f1cc4046b5d0
> > --- /dev/null
> > +++ b/src/libcamera/pipeline/rkisp1/meson.build
> > @@ -0,0 +1,3 @@
> > +libcamera_sources += files([
> > +    'rkisp1.cpp',
> > +])
> > diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > new file mode 100644
> > index 000000000000..d44b546fa008
> > --- /dev/null
> > +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > @@ -0,0 +1,431 @@
> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> > +/*
> > + * Copyright (C) 2019, Google Inc.
> > + *
> > + * rkisp1.cpp - Pipeline handler for Rockchip ISP1
> > + */
> > +
> > +#include <iomanip>
> > +#include <memory>
> > +#include <vector>
> > +
> > +#include <linux/media-bus-format.h>
> > +
> > +#include <libcamera/camera.h>
> > +#include <libcamera/request.h>
> > +#include <libcamera/stream.h>
> > +
> > +#include "camera_sensor.h"
> > +#include "device_enumerator.h"
> > +#include "log.h"
> > +#include "media_device.h"
> > +#include "pipeline_handler.h"
> > +#include "utils.h"
> > +#include "v4l2_device.h"
> > +#include "v4l2_subdevice.h"
> > +
> > +namespace libcamera {
> > +
> > +LOG_DEFINE_CATEGORY(RkISP1)
> > +
> > +class PipelineHandlerRkISP1 : public PipelineHandler
> > +{
> > +public:
> > +	PipelineHandlerRkISP1(CameraManager *manager);
> > +	~PipelineHandlerRkISP1();
> > +
> > +	CameraConfiguration streamConfiguration(Camera *camera,
> > +		const std::vector<StreamUsage> &usages) override;
> > +	int configureStreams(Camera *camera,
> > +			     const CameraConfiguration &config) override;
> > +
> > +	int allocateBuffers(Camera *camera, Stream *stream) override;
> > +	int freeBuffers(Camera *camera, Stream *stream) override;
> > +
> > +	int start(Camera *camera) override;
> > +	void stop(Camera *camera) override;
> > +
> > +	int queueRequest(Camera *camera, Request *request) override;
> > +
> > +	bool match(DeviceEnumerator *enumerator) override;
> > +
> > +private:
> > +	class RkISP1CameraData : public CameraData
> > +	{
> > +	public:
> > +		RkISP1CameraData(PipelineHandler *pipe)
> > +			: CameraData(pipe), sensor_(nullptr)
> > +		{
> > +		}
> > +
> > +		~RkISP1CameraData()
> > +		{
> > +			delete sensor_;
> > +		}
> > +
> > +		Stream stream_;
> > +		CameraSensor *sensor_;
> > +	};
> > +
> > +	static constexpr unsigned int RKISP1_BUFFER_COUNT = 4;
> > +
> > +	RkISP1CameraData *cameraData(const Camera *camera)
> > +	{
> > +		return static_cast<RkISP1CameraData *>(
> > +			PipelineHandler::cameraData(camera));
> > +	}
> > +
> > +	int initLinks();
> > +	int createCamera(MediaEntity *sensor);
> > +	void bufferReady(Buffer *buffer);
> > +
> > +	std::shared_ptr<MediaDevice> media_;
> > +	V4L2Subdevice *dphy_;
> > +	V4L2Subdevice *isp_;
> > +	V4L2Device *video_;
> > +
> > +	Camera *activeCamera_;
> > +};
> > +
> > +PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
> > +	: PipelineHandler(manager), dphy_(nullptr), isp_(nullptr),
> > +	  video_(nullptr)
> > +{
> > +}
> > +
> > +PipelineHandlerRkISP1::~PipelineHandlerRkISP1()
> > +{
> > +	delete video_;
> > +	delete isp_;
> > +	delete dphy_;
> > +
> > +	if (media_)
> > +		media_->release();
> > +}
> > +
> > +/* -----------------------------------------------------------------------------
> > + * Pipeline Operations
> > + */
> > +
> > +CameraConfiguration PipelineHandlerRkISP1::streamConfiguration(Camera *camera,
> > +	const std::vector<StreamUsage> &usages)
> > +{
> > +	RkISP1CameraData *data = cameraData(camera);
> > +	CameraConfiguration configs;
> > +	StreamConfiguration config{};
> > +
> > +	const Size &resolution = data->sensor_->resolution();
> > +	config.width = resolution.width;
> > +	config.height = resolution.height;
> > +	config.pixelFormat = V4L2_PIX_FMT_NV12;
> > +	config.bufferCount = RKISP1_BUFFER_COUNT;
> > +
> > +	configs[&data->stream_] = config;
> > +
> > +	LOG(RkISP1, Debug)
> > +		<< "Stream format set to " << config.width << "x"
> > +		<< config.height << "-0x" << std::hex << std::setfill('0')
> > +		<< std::setw(8) << config.pixelFormat;
> > +
> > +	return configs;
> > +}
> > +
> > +int PipelineHandlerRkISP1::configureStreams(Camera *camera,
> > +					    const CameraConfiguration &config)
> > +{
> > +	RkISP1CameraData *data = cameraData(camera);
> > +	const StreamConfiguration &cfg = config[&data->stream_];
> > +	CameraSensor *sensor = data->sensor_;
> > +	int ret;
> > +
> > +	/* Verify the configuration. */
> > +	const Size &resolution = sensor->resolution();
> > +	if (cfg.width > resolution.width ||
> > +	    cfg.height > resolution.height) {
> > +		LOG(RkISP1, Error)
> > +			<< "Invalid stream size: larger than sensor resolution";
> > +		return -EINVAL;
> > +	}
> > +
> > +	/*
> > +	 * Configure the sensor links: enable the link corresponding to this
> > +	 * camera and disable all the other sensor links.
> > +	 */
> > +	const MediaPad *pad = dphy_->entity()->getPadByIndex(0);
> > +
> > +	ret = media_->open();
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	for (MediaLink *link : pad->links()) {
> > +		bool enable = link->source()->entity() == sensor->entity();
> > +
> > +		if (!!(link->flags() & MEDIA_LNK_FL_ENABLED) == enable)
> > +			continue;
> > +
> > +		LOG(RkISP1, Debug)
> > +			<< (enable ? "Enabling" : "Disabling")
> > +			<< " link from sensor '"
> > +			<< link->source()->entity()->name()
> > +			<< "' to CSI-2 receiver";
> > +
> > +		ret = link->setEnabled(enable);
> > +		if (ret < 0)
> > +			break;
> > +	}
> > +
> > +	media_->close();
> > +
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	/*
> > +	 * Configure the format on the sensor output and propagate it through
> > +	 * the pipeline.
> > +	 */
> > +	V4L2SubdeviceFormat format;
> > +	format = sensor->getFormat({ MEDIA_BUS_FMT_SBGGR12_1X12,
> > +				     MEDIA_BUS_FMT_SGBRG12_1X12,
> > +				     MEDIA_BUS_FMT_SGRBG12_1X12,
> > +				     MEDIA_BUS_FMT_SRGGB12_1X12,
> > +				     MEDIA_BUS_FMT_SBGGR10_1X10,
> > +				     MEDIA_BUS_FMT_SGBRG10_1X10,
> > +				     MEDIA_BUS_FMT_SGRBG10_1X10,
> > +				     MEDIA_BUS_FMT_SRGGB10_1X10,
> > +				     MEDIA_BUS_FMT_SBGGR8_1X8,
> > +				     MEDIA_BUS_FMT_SGBRG8_1X8,
> > +				     MEDIA_BUS_FMT_SGRBG8_1X8,
> > +				     MEDIA_BUS_FMT_SRGGB8_1X8 },
> > +				   Size(cfg.width, cfg.height));
> 
> nice!
> 
> > +
> > +	LOG(RkISP1, Debug) << "Configuring sensor for " << format.toString();
> > +
> > +	ret = sensor->setFormat(&format);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	ret = dphy_->setFormat(0, &format);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	ret = dphy_->getFormat(1, &format);
> > +	if (ret < 0)
> > +		return ret;
> 
> The format is internally propagated from the dphy_[0] sink to the
> dphy_[1] source pad, right?

Correct.

> Do you need to get it from [1]? Does it get modified when propagated
> from sink to source?

No, because it doesn't get modified.
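
So reading it back from pad 1 mainly documents the propagation; a short
comment to that effect might help future readers, e.g.:

	/* The dphy propagates the format unchanged from sink pad 0 to source pad 1. */
	ret = dphy_->getFormat(1, &format);
	if (ret < 0)
		return ret;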

> > +
> > +	ret = isp_->setFormat(0, &format);
> > +	if (ret < 0)
> > +		return ret;
> 
> Maybe add a printout of the format that gets configured on each subdevice?
> I know you think it's too verbose, but it helps (possibly more while
> developing than when actually using this).

I'll add that for the sensor output. The CSI-2 receiver input and
output, and the ISP input, should not modify the format.
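
Something along these lines, as a sketch of the extra printout (logging
the format actually applied by the sensor):

	ret = sensor->setFormat(&format);
	if (ret < 0)
		return ret;

	LOG(RkISP1, Debug) << "Sensor configured with " << format.toString();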

> > +
> > +	V4L2DeviceFormat outputFormat = {};
> > +	outputFormat.width = cfg.width;
> > +	outputFormat.height = cfg.height;
> > +	outputFormat.fourcc = V4L2_PIX_FMT_NV12;
> > +	outputFormat.planesCount = 2;
> > +
> > +	ret = video_->setFormat(&outputFormat);
> > +	if (ret)
> > +		return ret;
> 
> I would at least print this one; setFormat() could modify the applied
> format...

I'll actually add an error check to make sure we get what we expect.
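
For instance, a sketch of such a check (the exact error message is
illustrative):

	ret = video_->setFormat(&outputFormat);
	if (ret)
		return ret;

	if (outputFormat.width != cfg.width ||
	    outputFormat.height != cfg.height ||
	    outputFormat.fourcc != V4L2_PIX_FMT_NV12) {
		LOG(RkISP1, Error) << "Unable to configure capture in "
				   << cfg.width << "x" << cfg.height << "-NV12";
		return -EINVAL;
	}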

> > +
> > +	return 0;
> > +}
> > +
> > +int PipelineHandlerRkISP1::allocateBuffers(Camera *camera, Stream *stream)
> > +{
> > +	return video_->exportBuffers(&stream->bufferPool());
> > +}
> > +
> > +int PipelineHandlerRkISP1::freeBuffers(Camera *camera, Stream *stream)
> > +{
> > +	if (video_->releaseBuffers())
> > +		LOG(RkISP1, Error) << "Failed to release buffers";
> > +
> > +	return 0;
> > +}
> > +
> > +int PipelineHandlerRkISP1::start(Camera *camera)
> > +{
> > +	int ret;
> > +
> > +	ret = video_->streamOn();
> > +	if (ret)
> > +		LOG(RkISP1, Error)
> > +			<< "Failed to start camera " << camera->name();
> > +
> > +	activeCamera_ = camera;
> > +
> > +	return ret;
> > +}
> > +
> > +void PipelineHandlerRkISP1::stop(Camera *camera)
> > +{
> > +	int ret;
> > +
> > +	ret = video_->streamOff();
> > +	if (ret)
> > +		LOG(RkISP1, Warning)
> > +			<< "Failed to stop camera " << camera->name();
> > +
> > +	PipelineHandler::stop(camera);
> > +
> > +	activeCamera_ = nullptr;
> > +}
> > +
> > +int PipelineHandlerRkISP1::queueRequest(Camera *camera, Request *request)
> > +{
> > +	RkISP1CameraData *data = cameraData(camera);
> > +	Stream *stream = &data->stream_;
> > +
> > +	Buffer *buffer = request->findBuffer(stream);
> > +	if (!buffer) {
> > +		LOG(RkISP1, Error)
> > +			<< "Attempt to queue request with invalid stream";
> > +		return -ENOENT;
> > +	}
> > +
> > +	int ret = video_->queueBuffer(buffer);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	PipelineHandler::queueRequest(camera, request);
> > +
> > +	return 0;
> > +}
> > +
> > +/* -----------------------------------------------------------------------------
> > + * Match and Setup
> > + */
> > +
> > +int PipelineHandlerRkISP1::initLinks()
> > +{
> > +	MediaLink *link;
> > +	int ret;
> > +
> > +	ret = media_->disableLinks();
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	link = media_->link("rockchip-sy-mipi-dphy", 1, "rkisp1-isp-subdev", 0);
> > +	if (!link)
> > +		return -ENODEV;
> > +
> > +	ret = link->setEnabled(true);
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	link = media_->link("rkisp1-isp-subdev", 2, "rkisp1_mainpath", 0);
> > +	if (!link)
> > +		return -ENODEV;
> > +
> > +	ret = link->setEnabled(true);
> > +	if (ret < 0)
> > +		return ret;
> > +
> 
> This calls for a helper in Links based on ImgUDevice::linkSetup().
> Not strictly related to this patch, though.

Possibly, but it's a bit more efficient for pipeline handlers to cache
the links they reuse. As this is a one-time operation I don't keep the
link pointer around. Let's see how it turns out once support for multiple
streams is added.

> > +	return 0;
> > +}
> > +
> > +int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
> > +{
> > +	int ret;
> > +
> > +	std::unique_ptr<RkISP1CameraData> data =
> > +		utils::make_unique<RkISP1CameraData>(this);
> > +
> > +	data->sensor_ = new CameraSensor(sensor);
> > +	ret = data->sensor_->init();
> > +	if (ret)
> > +		return ret;
> > +
> > +	std::set<Stream *> streams{ &data->stream_ };
> > +	std::shared_ptr<Camera> camera =
> > +		Camera::create(this, sensor->name(), streams);
> 
> Won't identical sensors generate identical camera names?
> 
> > +	registerCamera(std::move(camera), std::move(data));
> > +
> > +	return 0;
> > +}
> > +
> > +bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
> > +{
> > +	int ret;
> > +
> > +	DeviceMatch dm("rkisp1");
> > +	dm.add("rkisp1-isp-subdev");
> > +	dm.add("rkisp1_selfpath");
> > +	dm.add("rkisp1_mainpath");
> > +	dm.add("rkisp1-statistics");
> > +	dm.add("rkisp1-input-params");
> > +	dm.add("rockchip-sy-mipi-dphy");
> > +
> > +	media_ = enumerator->search(dm);
> > +	if (!media_)
> > +		return false;
> > +
> > +	media_->acquire();
> > +
> > +	ret = media_->open();
> 
> The media device should be closed in this function's error paths.
> The class destructor releases it, but does not close it.

You're right, I'll fix that.

> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	/* Create the V4L2 subdevices we will need. */
> > +	MediaEntity *dphy = media_->getEntityByName("rockchip-sy-mipi-dphy");
> > +	dphy_ = new V4L2Subdevice(dphy);
> 
> You could use V4L2Subdevice::fromEntityName() here and below for isp_.
> As you need to access the dphy entity below, you can use your newly
> introduced V4L2Subdevice::entity() method to access it.

I'll fix this too.

> > +	ret = dphy_->open();
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	MediaEntity *isp = media_->getEntityByName("rkisp1-isp-subdev");
> > +	isp_ = new V4L2Subdevice(isp);
> > +	ret = isp_->open();
> > +	if (ret < 0)
> > +		return ret;
> > +
> > +	/* Locate and open the capture video node. */
> > +	video_ = new V4L2Device(media_->getEntityByName("rkisp1_mainpath"));
> 
> And here V4L2Device::fromEntityName()

And this too.

> > +	if (video_->open())
> > +		return false;
> > +
> > +	video_->bufferReady.connect(this, &PipelineHandlerRkISP1::bufferReady);
> > +
> > +	/* Configure default links. */
> > +	ret = initLinks();
> > +	if (ret < 0) {
> > +		LOG(RkISP1, Error) << "Failed to setup links";
> > +		return false;
> > +	}
> > +
> > +	/*
> > +	 * Enumerate all sensors connected to the CSI-2 receiver and create one
> > +	 * camera instance for each of them.
> > +	 */
> > +	const MediaPad *pad = dphy->getPadByIndex(0);
> > +	if (!pad) {
> > +		ret = -EINVAL;
> > +		goto done;
> > +	}
> > +
> > +	for (MediaLink *link : pad->links())
> > +		createCamera(link->source()->entity());
> > +
> > +done:
> > +	media_->close();
> 
> Ah, you close it here. You might want to jump here instead of
> returning false in the error paths above.

Yes.

> > +
> > +	return ret == 0;
> > +}
> > +
> > +/* -----------------------------------------------------------------------------
> > + * Buffer Handling
> > + */
> > +
> > +void PipelineHandlerRkISP1::bufferReady(Buffer *buffer)
> > +{
> > +	ASSERT(activeCamera_);
> 
> Is receiving a bufferReady signal after the stop() operation a fatal
> error? If capture was started, cancelled buffers might still complete
> after S_STREAM(0) arrives?

This shouldn't be the case, as stream on/off and queue/dequeue are all
called from the same thread. Do you think there could be an issue here?

> Nice to see this progressing, and hope to be able to test it soon!

Be my guest ;-)

> > +
> > +	RkISP1CameraData *data = cameraData(activeCamera_);
> > +	Request *request = data->queuedRequests_.front();
> > +
> > +	completeBuffer(activeCamera_, request, buffer);
> > +	completeRequest(activeCamera_, request);
> > +}
> > +
> > +REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1);
> > +
> > +} /* namespace libcamera */

Patch

diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
index 40bb26405b88..0d466225a72e 100644
--- a/src/libcamera/pipeline/meson.build
+++ b/src/libcamera/pipeline/meson.build
@@ -4,3 +4,4 @@  libcamera_sources += files([
 ])
 
 subdir('ipu3')
+subdir('rkisp1')
diff --git a/src/libcamera/pipeline/rkisp1/meson.build b/src/libcamera/pipeline/rkisp1/meson.build
new file mode 100644
index 000000000000..f1cc4046b5d0
--- /dev/null
+++ b/src/libcamera/pipeline/rkisp1/meson.build
@@ -0,0 +1,3 @@ 
+libcamera_sources += files([
+    'rkisp1.cpp',
+])
diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
new file mode 100644
index 000000000000..d44b546fa008
--- /dev/null
+++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
@@ -0,0 +1,431 @@ 
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2019, Google Inc.
+ *
+ * rkisp1.cpp - Pipeline handler for Rockchip ISP1
+ */
+
+#include <iomanip>
+#include <memory>
+#include <vector>
+
+#include <linux/media-bus-format.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/request.h>
+#include <libcamera/stream.h>
+
+#include "camera_sensor.h"
+#include "device_enumerator.h"
+#include "log.h"
+#include "media_device.h"
+#include "pipeline_handler.h"
+#include "utils.h"
+#include "v4l2_device.h"
+#include "v4l2_subdevice.h"
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(RkISP1)
+
+class PipelineHandlerRkISP1 : public PipelineHandler
+{
+public:
+	PipelineHandlerRkISP1(CameraManager *manager);
+	~PipelineHandlerRkISP1();
+
+	CameraConfiguration streamConfiguration(Camera *camera,
+		const std::vector<StreamUsage> &usages) override;
+	int configureStreams(Camera *camera,
+			     const CameraConfiguration &config) override;
+
+	int allocateBuffers(Camera *camera, Stream *stream) override;
+	int freeBuffers(Camera *camera, Stream *stream) override;
+
+	int start(Camera *camera) override;
+	void stop(Camera *camera) override;
+
+	int queueRequest(Camera *camera, Request *request) override;
+
+	bool match(DeviceEnumerator *enumerator) override;
+
+private:
+	class RkISP1CameraData : public CameraData
+	{
+	public:
+		RkISP1CameraData(PipelineHandler *pipe)
+			: CameraData(pipe), sensor_(nullptr)
+		{
+		}
+
+		~RkISP1CameraData()
+		{
+			delete sensor_;
+		}
+
+		Stream stream_;
+		CameraSensor *sensor_;
+	};
+
+	static constexpr unsigned int RKISP1_BUFFER_COUNT = 4;
+
+	RkISP1CameraData *cameraData(const Camera *camera)
+	{
+		return static_cast<RkISP1CameraData *>(
+			PipelineHandler::cameraData(camera));
+	}
+
+	int initLinks();
+	int createCamera(MediaEntity *sensor);
+	void bufferReady(Buffer *buffer);
+
+	std::shared_ptr<MediaDevice> media_;
+	V4L2Subdevice *dphy_;
+	V4L2Subdevice *isp_;
+	V4L2Device *video_;
+
+	Camera *activeCamera_;
+};
+
+PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
+	: PipelineHandler(manager), dphy_(nullptr), isp_(nullptr),
+	  video_(nullptr)
+{
+}
+
+PipelineHandlerRkISP1::~PipelineHandlerRkISP1()
+{
+	delete video_;
+	delete isp_;
+	delete dphy_;
+
+	if (media_)
+		media_->release();
+}
+
+/* -----------------------------------------------------------------------------
+ * Pipeline Operations
+ */
+
+CameraConfiguration PipelineHandlerRkISP1::streamConfiguration(Camera *camera,
+	const std::vector<StreamUsage> &usages)
+{
+	RkISP1CameraData *data = cameraData(camera);
+	CameraConfiguration configs;
+	StreamConfiguration config{};
+
+	const Size &resolution = data->sensor_->resolution();
+	config.width = resolution.width;
+	config.height = resolution.height;
+	config.pixelFormat = V4L2_PIX_FMT_NV12;
+	config.bufferCount = RKISP1_BUFFER_COUNT;
+
+	configs[&data->stream_] = config;
+
+	LOG(RkISP1, Debug)
+		<< "Stream format set to " << config.width << "x"
+		<< config.height << "-0x" << std::hex << std::setfill('0')
+		<< std::setw(8) << config.pixelFormat;
+
+	return configs;
+}
+
+int PipelineHandlerRkISP1::configureStreams(Camera *camera,
+					    const CameraConfiguration &config)
+{
+	RkISP1CameraData *data = cameraData(camera);
+	const StreamConfiguration &cfg = config[&data->stream_];
+	CameraSensor *sensor = data->sensor_;
+	int ret;
+
+	/* Verify the configuration. */
+	const Size &resolution = sensor->resolution();
+	if (cfg.width > resolution.width ||
+	    cfg.height > resolution.height) {
+		LOG(RkISP1, Error)
+			<< "Invalid stream size: larger than sensor resolution";
+		return -EINVAL;
+	}
+
+	/*
+	 * Configure the sensor links: enable the link corresponding to this
+	 * camera and disable all the other sensor links.
+	 */
+	const MediaPad *pad = dphy_->entity()->getPadByIndex(0);
+
+	ret = media_->open();
+	if (ret < 0)
+		return ret;
+
+	for (MediaLink *link : pad->links()) {
+		bool enable = link->source()->entity() == sensor->entity();
+
+		if (!!(link->flags() & MEDIA_LNK_FL_ENABLED) == enable)
+			continue;
+
+		LOG(RkISP1, Debug)
+			<< (enable ? "Enabling" : "Disabling")
+			<< " link from sensor '"
+			<< link->source()->entity()->name()
+			<< "' to CSI-2 receiver";
+
+		ret = link->setEnabled(enable);
+		if (ret < 0)
+			break;
+	}
+
+	media_->close();
+
+	if (ret < 0)
+		return ret;
+
+	/*
+	 * Configure the format on the sensor output and propagate it through
+	 * the pipeline.
+	 */
+	V4L2SubdeviceFormat format;
+	format = sensor->getFormat({ MEDIA_BUS_FMT_SBGGR12_1X12,
+				     MEDIA_BUS_FMT_SGBRG12_1X12,
+				     MEDIA_BUS_FMT_SGRBG12_1X12,
+				     MEDIA_BUS_FMT_SRGGB12_1X12,
+				     MEDIA_BUS_FMT_SBGGR10_1X10,
+				     MEDIA_BUS_FMT_SGBRG10_1X10,
+				     MEDIA_BUS_FMT_SGRBG10_1X10,
+				     MEDIA_BUS_FMT_SRGGB10_1X10,
+				     MEDIA_BUS_FMT_SBGGR8_1X8,
+				     MEDIA_BUS_FMT_SGBRG8_1X8,
+				     MEDIA_BUS_FMT_SGRBG8_1X8,
+				     MEDIA_BUS_FMT_SRGGB8_1X8 },
+				   Size(cfg.width, cfg.height));
+
+	LOG(RkISP1, Debug) << "Configuring sensor for " << format.toString();
+
+	ret = sensor->setFormat(&format);
+	if (ret < 0)
+		return ret;
+
+	ret = dphy_->setFormat(0, &format);
+	if (ret < 0)
+		return ret;
+
+	ret = dphy_->getFormat(1, &format);
+	if (ret < 0)
+		return ret;
+
+	ret = isp_->setFormat(0, &format);
+	if (ret < 0)
+		return ret;
+
+	V4L2DeviceFormat outputFormat = {};
+	outputFormat.width = cfg.width;
+	outputFormat.height = cfg.height;
+	outputFormat.fourcc = V4L2_PIX_FMT_NV12;
+	outputFormat.planesCount = 2;
+
+	ret = video_->setFormat(&outputFormat);
+	if (ret)
+		return ret;
+
+	return 0;
+}
+
+int PipelineHandlerRkISP1::allocateBuffers(Camera *camera, Stream *stream)
+{
+	return video_->exportBuffers(&stream->bufferPool());
+}
+
+int PipelineHandlerRkISP1::freeBuffers(Camera *camera, Stream *stream)
+{
+	if (video_->releaseBuffers())
+		LOG(RkISP1, Error) << "Failed to release buffers";
+
+	return 0;
+}
+
+int PipelineHandlerRkISP1::start(Camera *camera)
+{
+	int ret;
+
+	ret = video_->streamOn();
+	if (ret)
+		LOG(RkISP1, Error)
+			<< "Failed to start camera " << camera->name();
+
+	activeCamera_ = camera;
+
+	return ret;
+}
+
+void PipelineHandlerRkISP1::stop(Camera *camera)
+{
+	int ret;
+
+	ret = video_->streamOff();
+	if (ret)
+		LOG(RkISP1, Warning)
+			<< "Failed to stop camera " << camera->name();
+
+	PipelineHandler::stop(camera);
+
+	activeCamera_ = nullptr;
+}
+
+int PipelineHandlerRkISP1::queueRequest(Camera *camera, Request *request)
+{
+	RkISP1CameraData *data = cameraData(camera);
+	Stream *stream = &data->stream_;
+
+	Buffer *buffer = request->findBuffer(stream);
+	if (!buffer) {
+		LOG(RkISP1, Error)
+			<< "Attempt to queue request with invalid stream";
+		return -ENOENT;
+	}
+
+	int ret = video_->queueBuffer(buffer);
+	if (ret < 0)
+		return ret;
+
+	PipelineHandler::queueRequest(camera, request);
+
+	return 0;
+}
+
+/* -----------------------------------------------------------------------------
+ * Match and Setup
+ */
+
+int PipelineHandlerRkISP1::initLinks()
+{
+	MediaLink *link;
+	int ret;
+
+	ret = media_->disableLinks();
+	if (ret < 0)
+		return ret;
+
+	link = media_->link("rockchip-sy-mipi-dphy", 1, "rkisp1-isp-subdev", 0);
+	if (!link)
+		return -ENODEV;
+
+	ret = link->setEnabled(true);
+	if (ret < 0)
+		return ret;
+
+	link = media_->link("rkisp1-isp-subdev", 2, "rkisp1_mainpath", 0);
+	if (!link)
+		return -ENODEV;
+
+	ret = link->setEnabled(true);
+	if (ret < 0)
+		return ret;
+
+	return 0;
+}
+
+int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
+{
+	int ret;
+
+	std::unique_ptr<RkISP1CameraData> data =
+		utils::make_unique<RkISP1CameraData>(this);
+
+	data->sensor_ = new CameraSensor(sensor);
+	ret = data->sensor_->init();
+	if (ret)
+		return ret;
+
+	std::set<Stream *> streams{ &data->stream_ };
+	std::shared_ptr<Camera> camera =
+		Camera::create(this, sensor->name(), streams);
+	registerCamera(std::move(camera), std::move(data));
+
+	return 0;
+}
+
+bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
+{
+	int ret;
+
+	DeviceMatch dm("rkisp1");
+	dm.add("rkisp1-isp-subdev");
+	dm.add("rkisp1_selfpath");
+	dm.add("rkisp1_mainpath");
+	dm.add("rkisp1-statistics");
+	dm.add("rkisp1-input-params");
+	dm.add("rockchip-sy-mipi-dphy");
+
+	media_ = enumerator->search(dm);
+	if (!media_)
+		return false;
+
+	media_->acquire();
+
+	ret = media_->open();
+	if (ret < 0)
+		return ret;
+
+	/* Create the V4L2 subdevices we will need. */
+	MediaEntity *dphy = media_->getEntityByName("rockchip-sy-mipi-dphy");
+	dphy_ = new V4L2Subdevice(dphy);
+	ret = dphy_->open();
+	if (ret < 0)
+		return ret;
+
+	MediaEntity *isp = media_->getEntityByName("rkisp1-isp-subdev");
+	isp_ = new V4L2Subdevice(isp);
+	ret = isp_->open();
+	if (ret < 0)
+		return ret;
+
+	/* Locate and open the capture video node. */
+	video_ = new V4L2Device(media_->getEntityByName("rkisp1_mainpath"));
+	if (video_->open())
+		return false;
+
+	video_->bufferReady.connect(this, &PipelineHandlerRkISP1::bufferReady);
+
+	/* Configure default links. */
+	ret = initLinks();
+	if (ret < 0) {
+		LOG(RkISP1, Error) << "Failed to setup links";
+		return false;
+	}
+
+	/*
+	 * Enumerate all sensors connected to the CSI-2 receiver and create one
+	 * camera instance for each of them.
+	 */
+	const MediaPad *pad = dphy->getPadByIndex(0);
+	if (!pad) {
+		ret = -EINVAL;
+		goto done;
+	}
+
+	for (MediaLink *link : pad->links())
+		createCamera(link->source()->entity());
+
+done:
+	media_->close();
+
+	return ret == 0;
+}
+
+/* -----------------------------------------------------------------------------
+ * Buffer Handling
+ */
+
+void PipelineHandlerRkISP1::bufferReady(Buffer *buffer)
+{
+	ASSERT(activeCamera_);
+
+	RkISP1CameraData *data = cameraData(activeCamera_);
+	Request *request = data->queuedRequests_.front();
+
+	completeBuffer(activeCamera_, request, buffer);
+	completeRequest(activeCamera_, request);
+}
+
+REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1);
+
+} /* namespace libcamera */