[libcamera-devel,v5,10/10] libcamera: pipeline: rkisp1: Attach to an IPA

Message ID 20191008004534.1585142-11-niklas.soderlund@ragnatech.se
State Superseded
Series
  • libcamera: ipa: Add basic IPA support

Commit Message

Niklas Söderlund Oct. 8, 2019, 12:45 a.m. UTC
Add the plumbing to the pipeline handler to interact with an IPA module.
This change makes the usage of an IPA module mandatory for the rkisp1
pipeline.

The RkISP1 pipeline handler makes use of a timeline component to
schedule actions. This might be useful for other pipeline handlers going
forward so keep the generic timeline implementation separate to make it
easy to break out.

Signed-off-by: Niklas Söderlund <niklas.soderlund@ragnatech.se>
---
 src/libcamera/pipeline/rkisp1/meson.build     |   2 +
 .../pipeline/rkisp1/rkisp1-timeline.cpp       |  51 +++
 src/libcamera/pipeline/rkisp1/rkisp1.cpp      | 328 +++++++++++++++++-
 src/libcamera/pipeline/rkisp1/rkisp1.h        |  78 +++++
 src/libcamera/pipeline/rkisp1/timeline.cpp    | 229 ++++++++++++
 src/libcamera/pipeline/rkisp1/timeline.h      |  72 ++++
 6 files changed, 747 insertions(+), 13 deletions(-)
 create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1-timeline.cpp
 create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1.h
 create mode 100644 src/libcamera/pipeline/rkisp1/timeline.cpp
 create mode 100644 src/libcamera/pipeline/rkisp1/timeline.h

Comments

Jacopo Mondi Oct. 8, 2019, 12:58 p.m. UTC | #1
Hi Niklas,

On Tue, Oct 08, 2019 at 02:45:34AM +0200, Niklas Söderlund wrote:
> Add the plumbing to the pipeline handler to interact with an IPA module.
> This change makes the usage of an IPA module mandatory for the rkisp1
> pipeline.
>
> The RkISP1 pipeline handler makes use of a timeline component to
> schedule actions. This might be useful for other pipeline handlers going
> forward so keep the generic timeline implementation separate to make it
> easy to break out.
>
> Signed-off-by: Niklas Söderlund <niklas.soderlund@ragnatech.se>
> ---
>  src/libcamera/pipeline/rkisp1/meson.build     |   2 +
>  .../pipeline/rkisp1/rkisp1-timeline.cpp       |  51 +++
>  src/libcamera/pipeline/rkisp1/rkisp1.cpp      | 328 +++++++++++++++++-
>  src/libcamera/pipeline/rkisp1/rkisp1.h        |  78 +++++
>  src/libcamera/pipeline/rkisp1/timeline.cpp    | 229 ++++++++++++
>  src/libcamera/pipeline/rkisp1/timeline.h      |  72 ++++
>  6 files changed, 747 insertions(+), 13 deletions(-)
>  create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1-timeline.cpp
>  create mode 100644 src/libcamera/pipeline/rkisp1/rkisp1.h
>  create mode 100644 src/libcamera/pipeline/rkisp1/timeline.cpp
>  create mode 100644 src/libcamera/pipeline/rkisp1/timeline.h
>
> diff --git a/src/libcamera/pipeline/rkisp1/meson.build b/src/libcamera/pipeline/rkisp1/meson.build
> index f1cc4046b5d064cb..7791e209175d95a9 100644
> --- a/src/libcamera/pipeline/rkisp1/meson.build
> +++ b/src/libcamera/pipeline/rkisp1/meson.build
> @@ -1,3 +1,5 @@
>  libcamera_sources += files([
>      'rkisp1.cpp',
> +    'rkisp1-timeline.cpp',
> +    'timeline.cpp',
>  ])
> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1-timeline.cpp b/src/libcamera/pipeline/rkisp1/rkisp1-timeline.cpp
> new file mode 100644
> index 0000000000000000..e6c3f2cc9a09a571
> --- /dev/null
> +++ b/src/libcamera/pipeline/rkisp1/rkisp1-timeline.cpp
> @@ -0,0 +1,51 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2019, Google Inc.
> + *
> + * rkisp1-timeline.cpp - Timeline handler for Rockchip ISP1
> + */
> +
> +#include "rkisp1.h"
> +
> +#include "log.h"
> +
> +namespace libcamera {
> +
> +LOG_DECLARE_CATEGORY(RkISP1)
> +
> +void RkISP1ActionSetSensor::run()
> +{
> +	sensor_->setControls(&controls_);
> +}
> +
> +void RkISP1ActionQueueBuffer::run()
> +{
> +	int ret = device_->queueBuffer(buffer_);
> +	if (ret < 0)
> +		LOG(RkISP1, Error) << "Failed to queue buffer";
> +}
> +
> +void RkISP1Timeline::bufferReady(Buffer *buffer)
> +{
> +	/*
> +	 * Calculate SOE by taking the end of DMA set by the kernel and applying
> +	 * the time offsets provided by the IPA to find the best estimate
> +	 * of SOE.
> +	 */
> +
> +	ASSERT(frameOffset(SOE) == 0);
> +
> +	utils::time_point soe = std::chrono::time_point<utils::clock>()
> +		+ std::chrono::nanoseconds(buffer->timestamp())
> +		+ timeOffset(SOE);
> +
> +	notifyStartOfExposure(buffer->sequence(), soe);
> +}
> +
> +void RkISP1Timeline::setDelay(unsigned int type, int frame, int msdelay)
> +{
> +	utils::duration delay = std::chrono::milliseconds(msdelay);
> +	setRawDelay(type, frame, delay);
> +}
> +
> +} /* namespace libcamera */
> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> index de4ab523d0e4fe36..74dfc8ca670ffa26 100644
> --- a/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> @@ -5,20 +5,22 @@
>   * rkisp1.cpp - Pipeline handler for Rockchip ISP1
>   */
>
> +#include "rkisp1.h"
> +
>  #include <algorithm>
>  #include <array>
>  #include <iomanip>
>  #include <memory>
> -#include <vector>
>
>  #include <linux/media-bus-format.h>
>
>  #include <libcamera/camera.h>
> +#include <libcamera/control_ids.h>
>  #include <libcamera/request.h>
>  #include <libcamera/stream.h>
>
> -#include "camera_sensor.h"
>  #include "device_enumerator.h"
> +#include "ipa_manager.h"
>  #include "log.h"
>  #include "media_device.h"
>  #include "pipeline_handler.h"
> @@ -34,7 +36,7 @@ class RkISP1CameraData : public CameraData
>  {
>  public:
>  	RkISP1CameraData(PipelineHandler *pipe)
> -		: CameraData(pipe), sensor_(nullptr)
> +		: CameraData(pipe), sensor_(nullptr), frame_(0)

Am I missing where frame_ gets reset ?

>  	{
>  	}
>
> @@ -43,8 +45,21 @@ public:
>  		delete sensor_;
>  	}
>
> +	int loadIPA();
> +
>  	Stream stream_;
>  	CameraSensor *sensor_;
> +	unsigned int frame_;
> +	std::vector<IPABuffer> ipaBuffers_;
> +	std::map<unsigned int, Request *> frameInfo_;
> +	RkISP1Timeline timeline_;
> +
> +private:
> +	void queueFrameAction(const IPAOperationData &action);
> +
> +	void queueBuffer(unsigned int frame, unsigned int type,
> +			 unsigned int id);
> +	void metadataReady(unsigned int frame, unsigned int aeState);
>  };
>
>  class RkISP1CameraConfiguration : public CameraConfiguration
> @@ -99,18 +114,116 @@ private:
>  			PipelineHandler::cameraData(camera));
>  	}
>
> +	friend RkISP1CameraData;
> +
>  	int initLinks();
>  	int createCamera(MediaEntity *sensor);
> +	void tryCompleteRequest(Request *request);
>  	void bufferReady(Buffer *buffer);
> +	void paramReady(Buffer *buffer);
> +	void statReady(Buffer *buffer);
>
>  	MediaDevice *media_;
>  	V4L2Subdevice *dphy_;
>  	V4L2Subdevice *isp_;
>  	V4L2VideoDevice *video_;
> +	V4L2VideoDevice *param_;
> +	V4L2VideoDevice *stat_;
> +
> +	BufferPool paramPool_;
> +	BufferPool statPool_;
> +
> +	std::map<unsigned int, Buffer *> paramBuffers_;
> +	std::map<unsigned int, Buffer *> statBuffers_;
>
>  	Camera *activeCamera_;
>  };
>
> +int RkISP1CameraData::loadIPA()
> +{
> +	ipa_ = IPAManager::instance()->createIPA(pipe_, 1, 1);
> +	if (!ipa_)
> +		return -ENOENT;
> +
> +	ipa_->queueFrameAction.connect(this,
> +				       &RkISP1CameraData::queueFrameAction);
> +
> +	return 0;
> +}
> +
> +void RkISP1CameraData::queueFrameAction(const IPAOperationData &action)
> +{
> +	switch (action.operation) {
> +	case RKISP1_IPA_ACTION_V4L2_SET: {
> +		unsigned int frame = action.data[0];
> +		V4L2ControlList controls = action.v4l2controls[0];
> +		timeline_.scheduleAction(utils::make_unique<RkISP1ActionSetSensor>(frame,
> +										   sensor_,
> +										   controls));
> +		break;
> +	}
> +	case RKISP1_IPA_ACTION_QUEUE_BUFFER: {
> +		unsigned int frame = action.data[0];
> +		unsigned int type = action.data[1];
> +		unsigned int id = action.data[2];
> +		queueBuffer(frame, type, id);
> +		break;
> +	}
> +	case RKISP1_IPA_ACTION_METADATA: {
> +		unsigned int frame = action.data[0];
> +		unsigned aeState = action.data[1];
> +		metadataReady(frame, aeState);
> +		break;
> +	}
> +	default:
> +		LOG(RkISP1, Error) << "Unknown action " << action.operation;
> +		break;
> +	}
> +}
> +
> +void RkISP1CameraData::queueBuffer(unsigned int frame, unsigned int type,
> +				   unsigned int id)
> +{
> +	PipelineHandlerRkISP1 *pipe =
> +		static_cast<PipelineHandlerRkISP1 *>(pipe_);
> +
> +	RkISP1ActionType acttype;
> +	V4L2VideoDevice *device;
> +	Buffer *buffer;
> +	switch (type) {
> +	case RKISP1_BUFFER_PARAM:
> +		acttype = QueueParameters;
> +		device = pipe->param_;
> +		buffer = pipe->paramBuffers_[id];
> +		break;
> +	case RKISP1_BUFFER_STAT:
> +		acttype = QueueStatistics;
> +		device = pipe->stat_;
> +		buffer = pipe->statBuffers_[id];
> +		break;
> +	default:
> +		LOG(RkISP1, Error) << "Unknown IPA buffer type " << type;
> +		return;
> +	}
> +
> +	timeline_.scheduleAction(utils::make_unique<RkISP1ActionQueueBuffer>(frame,
> +									     acttype,
> +									     device,
> +									     buffer));
> +}
> +
> +void RkISP1CameraData::metadataReady(unsigned int frame, unsigned int aeStatus)
> +{
> +	Request *request = frameInfo_[frame];
> +	PipelineHandlerRkISP1 *pipe =
> +		static_cast<PipelineHandlerRkISP1 *>(pipe_);
> +
> +	if (aeStatus)
> +		request->metadata().set<bool>(controls::AeLocked, aeStatus == 2);

It's fine for now, but I would have expected the IPA to inspect the hw
stats, compute whether AE was enabled or not, insert a control reporting
that in the ControlList, and then signal this slot, which would just need
to complete the request.
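
For illustration, a partial sketch in that direction using only calls
already present in this patch; the fuller version would have the IPA ship
a complete ControlList (e.g. in action.controls[0]) and merge it into the
request metadata, but that merge is not shown since the series does not
provide it:

	case RKISP1_IPA_ACTION_METADATA: {
		unsigned int frame = action.data[0];
		/*
		 * Hypothetical: the IPA has already digested the hardware
		 * statistics and reports the final AeLocked value, so the
		 * pipeline handler no longer interprets a raw AE state code.
		 */
		bool aeLocked = action.data[1];

		Request *request = frameInfo_[frame];
		request->metadata().set<bool>(controls::AeLocked, aeLocked);

		PipelineHandlerRkISP1 *pipe =
			static_cast<PipelineHandlerRkISP1 *>(pipe_);
		pipe->tryCompleteRequest(request);
		break;
	}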

> +
> +	pipe->tryCompleteRequest(request);
> +}
> +
>  RkISP1CameraConfiguration::RkISP1CameraConfiguration(Camera *camera,
>  						     RkISP1CameraData *data)
>  	: CameraConfiguration()
> @@ -202,12 +315,14 @@ CameraConfiguration::Status RkISP1CameraConfiguration::validate()
>
>  PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
>  	: PipelineHandler(manager), dphy_(nullptr), isp_(nullptr),
> -	  video_(nullptr)
> +	  video_(nullptr), param_(nullptr), stat_(nullptr)
>  {
>  }
>
>  PipelineHandlerRkISP1::~PipelineHandlerRkISP1()
>  {
> +	delete param_;
> +	delete stat_;
>  	delete video_;
>  	delete isp_;
>  	delete dphy_;
> @@ -317,6 +432,20 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
>  	if (ret)
>  		return ret;
>
> +	V4L2DeviceFormat paramFormat = {};
> +	paramFormat.fourcc = V4L2_META_FMT_RK_ISP1_PARAMS;
> +
> +	ret = param_->setFormat(&paramFormat);
> +	if (ret)
> +		return ret;
> +
> +	V4L2DeviceFormat statFormat = {};
> +	statFormat.fourcc = V4L2_META_FMT_RK_ISP1_STAT_3A;
> +
> +	ret = stat_->setFormat(&statFormat);
> +	if (ret)
> +		return ret;
> +

Could you move this after you have verified outputFormat has been set
successfully just below here ?

>  	if (outputFormat.size != cfg.size ||
>  	    outputFormat.fourcc != cfg.pixelFormat) {
>  		LOG(RkISP1, Error)
> @@ -332,39 +461,133 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
>  int PipelineHandlerRkISP1::allocateBuffers(Camera *camera,
>  					   const std::set<Stream *> &streams)
>  {
> +	RkISP1CameraData *data = cameraData(camera);
>  	Stream *stream = *streams.begin();
> +	int ret;
>
>  	if (stream->memoryType() == InternalMemory)
> -		return video_->exportBuffers(&stream->bufferPool());
> +		ret = video_->exportBuffers(&stream->bufferPool());
>  	else
> -		return video_->importBuffers(&stream->bufferPool());
> +		ret = video_->importBuffers(&stream->bufferPool());
> +
> +	if (ret)
> +		return ret;
> +
> +	paramPool_.createBuffers(stream->configuration().bufferCount + 1);
> +	ret = param_->exportBuffers(&paramPool_);
> +	if (ret) {
> +		video_->releaseBuffers();
> +		return ret;
> +	}
> +
> +	statPool_.createBuffers(stream->configuration().bufferCount + 1);
> +	ret = stat_->exportBuffers(&statPool_);
> +	if (ret) {
> +		param_->releaseBuffers();
> +		video_->releaseBuffers();
> +		return ret;
> +	}
> +
> +	for (unsigned int i = 0; i < stream->configuration().bufferCount + 1; i++) {
> +		paramBuffers_[i] = new Buffer(i);
> +		statBuffers_[i] = new Buffer(i);
> +
> +		data->ipaBuffers_.push_back({
> +			.type = RKISP1_BUFFER_PARAM,
> +			.id = i,
> +			.buffer = paramPool_.buffers()[i],
> +		});
> +		data->ipaBuffers_.push_back({
> +			.type = RKISP1_BUFFER_STAT,
> +			.id = i,
> +			.buffer = statPool_.buffers()[i],
> +		});

We really need a Buffer rework :)

> +	}
> +
> +	data->ipa_->mapBuffers(data->ipaBuffers_);
> +
> +	return ret;
>  }
>
>  int PipelineHandlerRkISP1::freeBuffers(Camera *camera,
>  				       const std::set<Stream *> &streams)
>  {
> +	RkISP1CameraData *data = cameraData(camera);
> +
> +	data->ipa_->unmapBuffers(data->ipaBuffers_);
> +	data->ipaBuffers_.clear();
> +
> +	for (auto it : paramBuffers_)
> +		delete it.second;
> +
> +	paramBuffers_.clear();
> +
> +	for (auto it : statBuffers_)
> +		delete it.second;
> +
> +	statBuffers_.clear();
> +
> +	if (param_->releaseBuffers())
> +		LOG(RkISP1, Error) << "Failed to release parameters buffers";
> +
> +	if (stat_->releaseBuffers())
> +		LOG(RkISP1, Error) << "Failed to release stat buffers";
> +
>  	if (video_->releaseBuffers())
> -		LOG(RkISP1, Error) << "Failed to release buffers";
> +		LOG(RkISP1, Error) << "Failed to release video buffers";
>
>  	return 0;
>  }
>
>  int PipelineHandlerRkISP1::start(Camera *camera)
>  {
> +	RkISP1CameraData *data = cameraData(camera);
>  	int ret;
>
> +	ret = param_->streamOn();
> +	if (ret) {
> +		LOG(RkISP1, Error)
> +			<< "Failed to start parameters " << camera->name();
> +		return ret;
> +	}
> +
> +	ret = stat_->streamOn();
> +	if (ret) {
> +		param_->streamOff();
> +		LOG(RkISP1, Error)
> +			<< "Failed to start statistics " << camera->name();
> +		return ret;
> +	}
> +
>  	ret = video_->streamOn();
> -	if (ret)
> +	if (ret) {
> +		param_->streamOff();
> +		stat_->streamOff();
> +
>  		LOG(RkISP1, Error)
>  			<< "Failed to start camera " << camera->name();
> +	}
>
>  	activeCamera_ = camera;
>
> +	/* Inform IPA of stream configuration and sensor controls. */
> +	std::map<unsigned int, IPAStream> streamConfig;
> +	streamConfig[0] = {
> +		.pixelFormat = data->stream_.configuration().pixelFormat,
> +		.size = data->stream_.configuration().size,
> +	};
> +
> +	std::map<unsigned int, V4L2ControlInfoMap> entityControls;
> +	entityControls[0] = data->sensor_->controls();
> +
> +	data->ipa_->configure(streamConfig, entityControls);
> +
>  	return ret;
>  }
>
>  void PipelineHandlerRkISP1::stop(Camera *camera)
>  {
> +	RkISP1CameraData *data = cameraData(camera);
>  	int ret;
>
>  	ret = video_->streamOff();
> @@ -372,6 +595,18 @@ void PipelineHandlerRkISP1::stop(Camera *camera)
>  		LOG(RkISP1, Warning)
>  			<< "Failed to stop camera " << camera->name();
>
> +	ret = stat_->streamOff();
> +	if (ret)
> +		LOG(RkISP1, Warning)
> +			<< "Failed to stop statistics " << camera->name();
> +
> +	ret = param_->streamOff();
> +	if (ret)
> +		LOG(RkISP1, Warning)
> +			<< "Failed to stop parameters " << camera->name();
> +
> +	data->timeline_.reset();
> +
>  	activeCamera_ = nullptr;
>  }
>
> @@ -387,12 +622,22 @@ int PipelineHandlerRkISP1::queueRequest(Camera *camera, Request *request)
>  		return -ENOENT;
>  	}
>
> -	int ret = video_->queueBuffer(buffer);
> -	if (ret < 0)
> -		return ret;
> -
>  	PipelineHandler::queueRequest(camera, request);
>
> +	data->frameInfo_[data->frame_] = request;
> +
> +	IPAOperationData op;
> +	op.operation = RKISP1_IPA_EVENT_QUEUE_REQUEST;
> +	op.data = { data->frame_ };
> +	op.controls = { request->controls() };
> +	data->ipa_->processEvent(op);
> +
> +	data->timeline_.scheduleAction(utils::make_unique<RkISP1ActionQueueBuffer>(data->frame_,
> +										   QueueVideo,
> +										   video_,
> +										   buffer));

I would have expected to see the following pattern:

Pipe::queueRequest -> IPAProcessEvent(QUEUE_REQUEST)
IPA inspects controls and programs the params
IPA::queueFrameAction(PARAM);
IPA::queueFrameAction(STAT);
IPA::queueFrameAction(VIDEO);

otherwise won't you queue video before params ? Is this ok ?
If it is, why do you need to go through the timeline instead of queueing
directly here ?
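
If the video buffer went through the IPA as well, the pipeline handler
side of queueRequest() could shrink to roughly the sketch below
(illustrative only; RKISP1_BUFFER_VIDEO is a hypothetical buffer type
that this series does not define):

	PipelineHandler::queueRequest(camera, request);

	data->frameInfo_[data->frame_] = request;

	IPAOperationData op;
	op.operation = RKISP1_IPA_EVENT_QUEUE_REQUEST;
	op.data = { data->frame_ };
	op.controls = { request->controls() };
	data->ipa_->processEvent(op);

	/*
	 * Hypothetical: no RkISP1ActionQueueBuffer is scheduled here. The IPA
	 * answers with RKISP1_IPA_ACTION_QUEUE_BUFFER actions for the
	 * parameters, statistics and (new) RKISP1_BUFFER_VIDEO buffers, in
	 * that order.
	 */
	data->frame_++;

	return 0;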

> +	data->frame_++;
> +
>  	return 0;
>  }
>
> @@ -435,11 +680,19 @@ int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
>  	std::unique_ptr<RkISP1CameraData> data =
>  		utils::make_unique<RkISP1CameraData>(this);
>
> +	data->controlInfo_.emplace(std::piecewise_construct,
> +				   std::forward_as_tuple(&controls::AeEnable),
> +				   std::forward_as_tuple(false, true));
> +
>  	data->sensor_ = new CameraSensor(sensor);
>  	ret = data->sensor_->init();
>  	if (ret)
>  		return ret;
>
> +	ret = data->loadIPA();
> +	if (ret)
> +		return ret;
> +
>  	std::set<Stream *> streams{ &data->stream_ };
>  	std::shared_ptr<Camera> camera =
>  		Camera::create(this, sensor->name(), streams);
> @@ -478,7 +731,17 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
>  	if (video_->open() < 0)
>  		return false;
>
> +	stat_ = V4L2VideoDevice::fromEntityName(media_, "rkisp1-statistics");
> +	if (stat_->open() < 0)
> +		return false;
> +
> +	param_ = V4L2VideoDevice::fromEntityName(media_, "rkisp1-input-params");
> +	if (param_->open() < 0)
> +		return false;
> +
>  	video_->bufferReady.connect(this, &PipelineHandlerRkISP1::bufferReady);
> +	stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
> +	param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);
>
>  	/* Configure default links. */
>  	if (initLinks() < 0) {
> @@ -504,13 +767,52 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
>   * Buffer Handling
>   */
>
> +void PipelineHandlerRkISP1::tryCompleteRequest(Request *request)
> +{
> +	if (request->hasPendingBuffers())
> +		return;
> +
> +	if (request->metadata().empty())
> +		return;
> +
> +	completeRequest(activeCamera_, request);
> +}
> +
>  void PipelineHandlerRkISP1::bufferReady(Buffer *buffer)
>  {
>  	ASSERT(activeCamera_);
> +	RkISP1CameraData *data = cameraData(activeCamera_);
>  	Request *request = buffer->request();
>
> +	data->timeline_.bufferReady(buffer);
> +
> +	if (data->frame_ <= buffer->sequence())
> +		data->frame_ = buffer->sequence() + 1;
> +
>  	completeBuffer(activeCamera_, request, buffer);
> -	completeRequest(activeCamera_, request);
> +	tryCompleteRequest(request);
> +}
> +
> +void PipelineHandlerRkISP1::paramReady(Buffer *buffer)
> +{
> +	ASSERT(activeCamera_);
> +	RkISP1CameraData *data = cameraData(activeCamera_);
> +
> +	IPAOperationData op;
> +	op.operation = RKISP1_IPA_EVENT_SIGNAL_BUFFER;
> +	op.data = { RKISP1_BUFFER_PARAM, buffer->index() };
> +	data->ipa_->processEvent(op);
> +}
> +
> +void PipelineHandlerRkISP1::statReady(Buffer *buffer)
> +{
> +	ASSERT(activeCamera_);
> +	RkISP1CameraData *data = cameraData(activeCamera_);
> +
> +	IPAOperationData op;
> +	op.operation = RKISP1_IPA_EVENT_SIGNAL_BUFFER;
> +	op.data = { RKISP1_BUFFER_STAT, buffer->index() };
> +	data->ipa_->processEvent(op);
>  }
>
>  REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1);
> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.h b/src/libcamera/pipeline/rkisp1/rkisp1.h
> new file mode 100644
> index 0000000000000000..f9ea3a822cb1d0f3
> --- /dev/null
> +++ b/src/libcamera/pipeline/rkisp1/rkisp1.h
> @@ -0,0 +1,78 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2019, Google Inc.
> + *
> + * rkisp1.h - Pipeline handler for Rockchip ISP1
> + */
> +#ifndef __LIBCAMERA_RKISP1_H__
> +#define __LIBCAMERA_RKISP1_H__
> +
> +#include <ipa/rkisp1.h>
> +#include <libcamera/buffer.h>
> +
> +#include "camera_sensor.h"
> +#include "timeline.h"
> +#include "v4l2_videodevice.h"
> +
> +namespace libcamera {
> +
> +enum RkISP1ActionType {
> +	SetSensor,
> +	SOE,
> +	QueueVideo,
> +	QueueParameters,
> +	QueueStatistics,
> +};
> +
> +class RkISP1ActionSetSensor : public FrameAction
> +{
> +public:
> +	RkISP1ActionSetSensor(unsigned int frame, CameraSensor *sensor, V4L2ControlList controls)
> +		: FrameAction(frame, SetSensor), sensor_(sensor), controls_(controls) {}
> +
> +protected:
> +	void run() override;
> +
> +private:
> +	CameraSensor *sensor_;
> +	V4L2ControlList controls_;
> +};
> +
> +class RkISP1ActionQueueBuffer : public FrameAction
> +{
> +public:
> +	RkISP1ActionQueueBuffer(unsigned int frame, RkISP1ActionType type,
> +				V4L2VideoDevice *device, Buffer *buffer)
> +		: FrameAction(frame, type), device_(device), buffer_(buffer)
> +	{
> +	}

Be consistent, you have used {} on the same line in the previous class
declaration

> +
> +protected:
> +	void run() override;
> +
> +private:
> +	V4L2VideoDevice *device_;
> +	Buffer *buffer_;
> +};
> +
> +class RkISP1Timeline : public Timeline
> +{
> +public:
> +	RkISP1Timeline()
> +		: Timeline()
> +	{
> +		setDelay(SetSensor, -1, 5);
> +		setDelay(SOE, 0, -1);
> +		setDelay(QueueVideo, -1, 10);
> +		setDelay(QueueParameters, -1, 8);
> +		setDelay(QueueStatistics, -1, 8);

I don't get this, what does a -1 frame offset stand for? Are the
delays arbitrary ?
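
As a worked example of how scheduleAction() combines these values (the
delays are the ones set above; the frame numbers and the 33 ms interval
are made up): with the last recorded SOE at frame 10 and frameInterval_
estimated at 33 ms, a SetSensor action for frame 12 (frame offset -1,
time offset 5 ms) gets

	deadline = SOE(10) + (12 - 10 - 1) * 33 ms + 5 ms = SOE(10) + 38 ms

i.e. about 5 ms after the expected SOE of frame 11, one frame interval
ahead of the target frame's SOE.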

> +	}
> +
> +	void bufferReady(Buffer *buffer);
> +
> +	void setDelay(unsigned int type, int frame, int msdelay);
> +};
> +
> +} /* namespace libcamera */
> +
> +#endif /* __LIBCAMERA_RKISP1_H__ */
> diff --git a/src/libcamera/pipeline/rkisp1/timeline.cpp b/src/libcamera/pipeline/rkisp1/timeline.cpp
> new file mode 100644
> index 0000000000000000..95ec6b7ca0a0237a
> --- /dev/null
> +++ b/src/libcamera/pipeline/rkisp1/timeline.cpp
> @@ -0,0 +1,229 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2019, Google Inc.
> + *
> + * timeline.cpp - Timeline for per-frame control
> + */
> +
> +#include "timeline.h"
> +
> +#include "log.h"
> +
> +/**
> + * \file timeline.h
> + * \brief Timeline for per-frame control
> + */
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(Timeline)
> +
> +/**
> + * \class FrameAction
> + * \brief Action that can be schedule on a Timeline
> + *
> + * A frame action is an event that can be schedule to be executed at a point
s/that can be//

> + * in time. A frame action has two primal attributes: frame number and type.

I would "a frame number and a type"

> + *
> + * The frame number describes the frame for which the action should have an

the frame to which the action is associated to

> + * effect. The type is a numerical ID which identifies the action within the
> + * pipeline and IPA protocol.
> + *
> + * \sa Timeline
> + */
> +
> +/**
> + * \class Timeline
> + * \brief Executor of FrameActions

do not pluralize class names

> + *
> + * The timeline have three primary functions.

s/have/has
s/function./functions:/

> + *
> + * 1. Keep track of the Start of Exposure (SOE) for every frame processed by
> + *    the hardware. Using this information it shall keep an up-to-date estimate
> + *    of the frame interval (time between two SOEs).

frame interval or frame duration ?
two SOEs/two consecutive SOE events/ ?

> + *
> + *    The estimated frame interval together with recorded SOE events is the
s/is/are

> + *    foundation for how the timeline schedule FrameActions at specific points
> + *    in time.

I would "are used as parameter to calculate the time delay from the
current time each scheduled FrameAction should be associated with"

> + *    \todo Improve the frame interval estimation algorithm.
> + *
> + * 2. Keep track of current delays for different types of actions. The delays
> + *    for different actions might differ during a capture session. Exposure time
> + *    effects the over all FPS and different ISP parameters might impacts its
> + *    processing time.
> + *
> + *    The action type delays shall be updated by the IPA in conjunction with
> + *    how it changes the capture parameters.

I appreciate the effort in documenting this as I complained about it in
the last review, but I still don't get -why- a frame offset should be
associated with an event type... If tuning of the ISP parameters might
in the future change the frame duration calculation, I agree the IPA
could try to provide an estimation and associate it with a scheduled
action (not sure if a single action or an action type). But a frame
offset? How do you see it used in practice ?
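
As an illustrative guess at the intended usage (not something this series
implements): the IPA could retune both components at runtime through
RkISP1Timeline::setDelay(), e.g. stretching the sensor write lead time
after programming a longer exposure while keeping its -1 frame offset:

	timeline_.setDelay(SetSensor, -1, 8);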

> + *
> + * 3. Schedule actions on the timeline. This is the process of taking a
> + *    FrameAction which contains an abstract description of what frame and
> + *    what type of action it contains and turning that into an time point
> + *    and make sure the action is executed at that time.
> + */
> +
> +Timeline::Timeline()
> +	: frameInterval_(0)
> +{
> +	timer_.timeout.connect(this, &Timeline::timeout);
> +}
> +
> +/**
> + * \brief Reset and stop the timeline
> + *
> + * The timeline needs to be reset when the timeline should no longer execute
> + * actions. A timeline should be reset between two capture sessions to prevent
> + * the old capture session to effect the second one.
> + */
> +void Timeline::reset()
> +{
> +	timer_.stop();
> +
> +	actions_.clear();
> +	history_.clear();
> +}
> +
> +/**
> + * \brief Schedule action on the timeline
s/action/an action/
> + * \param[in] action FrameAction to schedule
> + *
> + * The act of scheduling an action to the timeline is the process of taking
> + * the properties of the action (type, frame and time offsets) and translating
> + * that to a time point using the current values for the action type timings
> + * value recorded in the timeline. If an action is scheduled too late, execute
> + * it immediately.
> + */
> +void Timeline::scheduleAction(std::unique_ptr<FrameAction> action)
> +{
> +	unsigned int lastFrame;
> +	utils::time_point lastTime;
> +
> +	if (history_.empty()) {
> +		lastFrame = 0;
> +		lastTime = std::chrono::steady_clock::now();
> +	} else {
> +		lastFrame = history_.back().first;
> +		lastTime = history_.back().second;
> +	}
> +
> +	/*
> +	 * Calculate when the action shall be schedule by first finding out how
> +	 * many frames in the future the action acts on and then add the actions
> +	 * frame offset. After the spatial frame offset is found out translate
> +	 * that to a time point by using the last estimated start of exposure
> +	 * (SOE) as the fixed offset. Lastly add the actions time offset to the

not the offset but the frame duration (not the SOE) is the
multiplication unit here.

s/actions/action/

> +	 * time point.
> +	 */
> +	int frame = action->frame() - lastFrame + frameOffset(action->type());
> +	utils::time_point deadline = lastTime + frame * frameInterval_
> +		+ timeOffset(action->type());
> +
> +	utils::time_point now = std::chrono::steady_clock::now();
> +	if (deadline < now) {
> +		LOG(Timeline, Warning)
> +			<< "Action scheduled too late "
> +			<< utils::time_point_to_string(deadline)
> +			<< ", run now " << utils::time_point_to_string(now);
> +		action->run();
> +	} else {
> +		actions_.insert({ deadline, std::move(action) });

actions_ is sorted as time_points are comparable! I missed that part
in the previous review, sorry

> +		updateDeadline();
> +	}
> +}
> +
> +void Timeline::notifyStartOfExposure(unsigned int frame, utils::time_point time)
> +{
> +	history_.push_back(std::make_pair(frame, time));
> +
> +	if (history_.size() <= HISTORY_DEPTH / 2)
> +		return;

Not enough history to make a good estimation ?

> +
> +	while (history_.size() > HISTORY_DEPTH)
> +		history_.pop_front();

Will you ever be over by more than 1 item ? I.e. do you need to loop, or
once you reach full capacity do you just pop the head and insert at the
back every time ?
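
For reference, the single-pop variant being suggested would reduce to the
following, assuming this function only ever appends one entry at a time:

	if (history_.size() > HISTORY_DEPTH)
		history_.pop_front();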

> +
> +	/* Update estimated time between two start of exposures. */
> +	utils::duration sumExposures(0);
> +	unsigned int numExposures = 0;
> +
> +	utils::time_point lastTime;
> +	for (auto it = history_.begin(); it != history_.end(); it++) {
> +		if (it != history_.begin()) {
> +			sumExposures += it->second - lastTime;
> +			numExposures++;
> +		}
> +
> +		lastTime = it->second;
> +	}
> +
> +	frameInterval_ = sumExposures;
> +	if (numExposures)
> +		frameInterval_ /= numExposures;
> +}
> +
> +int Timeline::frameOffset(unsigned int type) const
> +{
> +	const auto it = delays_.find(type);
> +	if (it == delays_.end()) {
> +		LOG(Timeline, Error)
> +			<< "No frame offset set for action type " << type;
> +		return 0;
> +	}
> +
> +	return it->second.first;
> +}
> +
> +utils::duration Timeline::timeOffset(unsigned int type) const
> +{
> +	const auto it = delays_.find(type);
> +	if (it == delays_.end()) {
> +		LOG(Timeline, Error)
> +			<< "No time offset set for action type " << type;
> +		return utils::duration::zero();
> +	}
> +
> +	return it->second.second;
> +}
> +
> +void Timeline::setRawDelay(unsigned int type, int frame, utils::duration time)
> +{
> +	delays_[type] = std::make_pair(frame, time);
> +}
> +
> +void Timeline::updateDeadline()
> +{
> +	if (actions_.empty())
> +		return;
> +
> +	const utils::time_point &deadline = actions_.begin()->first;
> +
> +	if (timer_.isRunning() && deadline >= timer_.deadline())
> +		return;
> +
> +	if (deadline <= std::chrono::steady_clock::now()) {
> +		timeout(&timer_);
> +		return;
> +	}
> +
> +	timer_.start(deadline);
> +}
> +
> +void Timeline::timeout(Timer *timer)
> +{
> +	utils::time_point now = std::chrono::steady_clock::now();
> +
> +	for (auto it = actions_.begin(); it != actions_.end();) {
> +		const utils::time_point &sched = it->first;
> +
> +		if (sched > now)
> +			break;
> +
> +		FrameAction *action = it->second.get();
> +
> +		action->run();
> +
> +		it = actions_.erase(it);
> +	}
> +
> +	updateDeadline();
> +}
> +
> +} /* namespace libcamera */
> diff --git a/src/libcamera/pipeline/rkisp1/timeline.h b/src/libcamera/pipeline/rkisp1/timeline.h
> new file mode 100644
> index 0000000000000000..9d30e4eaf8743d07
> --- /dev/null
> +++ b/src/libcamera/pipeline/rkisp1/timeline.h
> @@ -0,0 +1,72 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2019, Google Inc.
> + *
> + * timeline.h - Timeline for per-frame controls
> + */
> +#ifndef __LIBCAMERA_TIMELINE_H__
> +#define __LIBCAMERA_TIMELINE_H__
> +
> +#include <list>
> +#include <map>
> +
> +#include <libcamera/timer.h>
> +
> +#include "utils.h"
> +
> +namespace libcamera {
> +
> +class FrameAction
> +{
> +public:
> +	FrameAction(unsigned int frame, unsigned int type)
> +		: frame_(frame), type_(type) {}
> +
> +	virtual ~FrameAction() {}
> +
> +	unsigned int frame() const { return frame_; }
> +	unsigned int type() const { return type_; }
> +
> +	virtual void run() = 0;
> +
> +private:
> +	unsigned int frame_;
> +	unsigned int type_;
> +};
> +
> +class Timeline
> +{
> +public:
> +	Timeline();
> +	virtual ~Timeline() {}
> +
> +	virtual void reset();
> +	virtual void scheduleAction(std::unique_ptr<FrameAction> action);
> +	virtual void notifyStartOfExposure(unsigned int frame, utils::time_point time);
> +
> +	utils::duration frameInterval() const { return frameInterval_; }
> +
> +protected:
> +	int frameOffset(unsigned int type) const;
> +	utils::duration timeOffset(unsigned int type) const;
> +
> +	void setRawDelay(unsigned int type, int frame, utils::duration time);
> +
> +	std::map<unsigned int, std::pair<int, utils::duration>> delays_;
> +
> +private:
> +	static constexpr unsigned int HISTORY_DEPTH = 10;
> +
> +	void timeout(Timer *timer);
> +	void updateDeadline();
> +
> +	std::list<std::pair<unsigned int, utils::time_point>> history_;
> +	std::multimap<utils::time_point, std::unique_ptr<FrameAction>> actions_;
> +	utils::duration frameInterval_;
> +
> +	Timer timer_;
> +};
> +
> +} /* namespace libcamera */
> +
> +#endif /* __LIBCAMERA_TIMELINE_H__ */
> --
> 2.23.0
>
