[v5,5/5] libcamera: rkisp1: Plumb the ConverterDW100 converter

Message ID 20240709202151.152289-6-umang.jain@ideasonboard.com
State Superseded
Series
  • libcamera: rkisp1: Plumb the ConverterDW100 converter

Commit Message

Umang Jain July 9, 2024, 8:21 p.m. UTC
Plumb the ConverterDW100 converter into the rkisp1 pipeline handler.
If the dewarper is found, it is instantiated and buffers are exported
from it instead of from RkISP1Path. Internal buffers are allocated for
the RkISP1Path in the case where the dewarper is going to be used.

The RKISP1 pipeline handler now supports scaler crop control through
ConverterDW100. Register the ScalerCrop control for the cameras created
in the RKISP1 pipeline handler.

Signed-off-by: Umang Jain <umang.jain@ideasonboard.com>
---
 src/libcamera/pipeline/rkisp1/rkisp1.cpp      | 148 +++++++++++++++++-
 src/libcamera/pipeline/rkisp1/rkisp1_path.cpp |  12 +-
 src/libcamera/pipeline/rkisp1/rkisp1_path.h   |  14 ++
 3 files changed, 165 insertions(+), 9 deletions(-)

Comments

Jacopo Mondi July 17, 2024, 1:10 p.m. UTC | #1
Hi Umang

On Wed, Jul 10, 2024 at 01:51:51AM GMT, Umang Jain wrote:
> Plumb the ConverterDW100 converter in the rkisp1 pipeline handler.
> If the dewarper is found, it is instantiated and buffers are exported
> from it, instead of RkISP1Path. Internal buffers are allocated for the
> RkISP1Path in case where dewarper is going to be used.
>
> The RKISP1 pipeline handler now supports scaler crop control through
> ConverterDW100. Register the ScalerCrop control for the cameras created
> in the RKISP1 pipeline handler.
>
> Signed-off-by: Umang Jain <umang.jain@ideasonboard.com>
> ---
>  src/libcamera/pipeline/rkisp1/rkisp1.cpp      | 148 +++++++++++++++++-
>  src/libcamera/pipeline/rkisp1/rkisp1_path.cpp |  12 +-
>  src/libcamera/pipeline/rkisp1/rkisp1_path.h   |  14 ++
>  3 files changed, 165 insertions(+), 9 deletions(-)
>
> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> index bfc44239..881e10e1 100644
> --- a/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> @@ -8,9 +8,11 @@
>  #include <algorithm>
>  #include <array>
>  #include <iomanip>
> +#include <map>
>  #include <memory>
>  #include <numeric>
>  #include <queue>
> +#include <vector>
>
>  #include <linux/media-bus-format.h>
>  #include <linux/rkisp1-config.h>
> @@ -33,6 +35,7 @@
>
>  #include "libcamera/internal/camera.h"
>  #include "libcamera/internal/camera_sensor.h"
> +#include "libcamera/internal/converter/converter_dw100.h"
>  #include "libcamera/internal/delayed_controls.h"
>  #include "libcamera/internal/device_enumerator.h"
>  #include "libcamera/internal/framebuffer.h"
> @@ -62,6 +65,8 @@ struct RkISP1FrameInfo {
>
>  	bool paramDequeued;
>  	bool metadataProcessed;
> +
> +	std::optional<Rectangle> scalerCrop;
>  };
>
>  class RkISP1Frames
> @@ -181,6 +186,7 @@ private:
>  	void bufferReady(FrameBuffer *buffer);
>  	void paramReady(FrameBuffer *buffer);
>  	void statReady(FrameBuffer *buffer);
> +	void dewarpBufferReady(FrameBuffer *buffer);
>  	void frameStart(uint32_t sequence);
>
>  	int allocateBuffers(Camera *camera);
> @@ -200,6 +206,13 @@ private:
>  	RkISP1MainPath mainPath_;
>  	RkISP1SelfPath selfPath_;
>
> +	std::unique_ptr<ConverterDW100> dewarper_;
> +	bool useDewarper_;
> +
> +	/* Internal buffers used when dewarper is being used */
> +	std::vector<std::unique_ptr<FrameBuffer>> mainPathBuffers_;
> +	std::queue<FrameBuffer *> availableMainPathBuffers_;
> +
>  	std::vector<std::unique_ptr<FrameBuffer>> paramBuffers_;
>  	std::vector<std::unique_ptr<FrameBuffer>> statBuffers_;
>  	std::queue<FrameBuffer *> availableParamBuffers_;
> @@ -222,6 +235,8 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
>
>  	FrameBuffer *paramBuffer = nullptr;
>  	FrameBuffer *statBuffer = nullptr;
> +	FrameBuffer *mainPathBuffer = nullptr;
> +	FrameBuffer *selfPathBuffer = nullptr;
>
>  	if (!isRaw) {
>  		if (pipe_->availableParamBuffers_.empty()) {
> @@ -239,10 +254,16 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
>
>  		statBuffer = pipe_->availableStatBuffers_.front();
>  		pipe_->availableStatBuffers_.pop();
> +
> +		if (pipe_->useDewarper_) {
> +			mainPathBuffer = pipe_->availableMainPathBuffers_.front();

Can we exhaust the availableMainPathBuffers_ queue? Should this be
checked for validity ?
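
A minimal sketch of such a check, assuming it mirrors the underrun
handling already used for the param and stat queues above (illustration
only, not part of the patch):

	if (pipe_->useDewarper_) {
		if (pipe_->availableMainPathBuffers_.empty()) {
			LOG(RkISP1, Error) << "Main path buffer underrun";
			return nullptr;
		}

		mainPathBuffer = pipe_->availableMainPathBuffers_.front();
		pipe_->availableMainPathBuffers_.pop();
	}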

> +			pipe_->availableMainPathBuffers_.pop();
> +		}
>  	}
>
> -	FrameBuffer *mainPathBuffer = request->findBuffer(&data->mainPathStream_);
> -	FrameBuffer *selfPathBuffer = request->findBuffer(&data->selfPathStream_);
> +	if (!mainPathBuffer)
> +		mainPathBuffer = request->findBuffer(&data->mainPathStream_);
> +	selfPathBuffer = request->findBuffer(&data->selfPathStream_);
>
>  	RkISP1FrameInfo *info = new RkISP1FrameInfo;
>
> @@ -268,6 +289,7 @@ int RkISP1Frames::destroy(unsigned int frame)
>
>  	pipe_->availableParamBuffers_.push(info->paramBuffer);
>  	pipe_->availableStatBuffers_.push(info->statBuffer);
> +	pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
>
>  	frameInfo_.erase(info->frame);
>
> @@ -283,6 +305,7 @@ void RkISP1Frames::clear()
>
>  		pipe_->availableParamBuffers_.push(info->paramBuffer);
>  		pipe_->availableStatBuffers_.push(info->statBuffer);
> +		pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
>
>  		delete info;
>  	}
> @@ -607,7 +630,7 @@ CameraConfiguration::Status RkISP1CameraConfiguration::validate()
>   */
>
>  PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
> -	: PipelineHandler(manager), hasSelfPath_(true)
> +	: PipelineHandler(manager), hasSelfPath_(true), useDewarper_(false)
>  {
>  }
>
> @@ -785,12 +808,19 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
>  		<< " crop " << rect;
>
>  	std::map<unsigned int, IPAStream> streamConfig;
> +	std::vector<std::reference_wrapper<StreamConfiguration>> outputCfgs;
>
>  	for (const StreamConfiguration &cfg : *config) {
>  		if (cfg.stream() == &data->mainPathStream_) {
>  			ret = mainPath_.configure(cfg, format);
>  			streamConfig[0] = IPAStream(cfg.pixelFormat,
>  						    cfg.size);
> +			/* Configure dewarp */
> +			if (dewarper_ && !isRaw_) {
> +				outputCfgs.push_back(const_cast<StreamConfiguration &>(cfg));

Why a vector, can you have more than one ?

> +				ret = dewarper_->configure(cfg, outputCfgs);

Ah, the need to have a vector comes from the configure() signature

Seems like
				ret = dewarper_->configure(cfg, { cfg });

would do

> +				useDewarper_ = ret ? false : true;
> +			}
>  		} else if (hasSelfPath_) {
>  			ret = selfPath_.configure(cfg, format);
>  			streamConfig[1] = IPAStream(cfg.pixelFormat,
> @@ -839,6 +869,9 @@ int PipelineHandlerRkISP1::exportFrameBuffers([[maybe_unused]] Camera *camera, S
>  	RkISP1CameraData *data = cameraData(camera);
>  	unsigned int count = stream->configuration().bufferCount;
>
> +	if (useDewarper_)
> +		return dewarper_->exportBuffers(&data->mainPathStream_, count, buffers);
> +

Unconditionally ? Even if the stream is the selfPath one ?

>  	if (stream == &data->mainPathStream_)
>  		return mainPath_.exportBuffers(count, buffers);
>  	else if (hasSelfPath_ && stream == &data->selfPathStream_)
> @@ -866,6 +899,16 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
>  		ret = stat_->allocateBuffers(maxCount, &statBuffers_);
>  		if (ret < 0)
>  			goto error;
> +
> +		/* If the dewarper is being used, allocate internal buffers for ISP */
> +		if (useDewarper_) {
> +			ret = mainPath_.allocateBuffers(maxCount, &mainPathBuffers_);
> +			if (ret < 0)
> +				goto error;

Is this right ?

It seems the model here is to allocate buffers in the main path and
import them in the dewarper.

allocateBuffers() creates and exports the buffers but sets the video
device in MMAP mode.

I would be expecting instead the main path to
1) export buffers
2) import them back to initialize the queue in DMABUF mode

From there buffers dequeued from the main path are queued to the
dewarper without any need to be mapped into a userspace accessible
address.

Am I confused maybe ?
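
For illustration, a rough sketch of that model at allocateBuffers()
time, assuming a hypothetical RkISP1Path::importBuffers() wrapper
around video_->importBuffers() (not part of the patch):

	/* Allocate the internal pool on the main path and export it as
	 * dmabufs, leaving the queue's memory type uninitialized... */
	ret = mainPath_.exportBuffers(maxCount, &mainPathBuffers_);
	if (ret < 0)
		goto error;

	/* ...then initialize the main path queue in DMABUF mode, so the
	 * same buffers can be queued back to it and later handed to the
	 * dewarper without a userspace mapping. */
	ret = mainPath_.importBuffers(maxCount);
	if (ret < 0)
		goto error;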


> +
> +			for (std::unique_ptr<FrameBuffer> &buffer : mainPathBuffers_)
> +				availableMainPathBuffers_.push(buffer.get());
> +		}
>  	}
>
>  	for (std::unique_ptr<FrameBuffer> &buffer : paramBuffers_) {
> @@ -889,6 +932,7 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
>  error:
>  	paramBuffers_.clear();
>  	statBuffers_.clear();
> +	mainPathBuffers_.clear();
>
>  	return ret;
>  }
> @@ -903,8 +947,12 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
>  	while (!availableParamBuffers_.empty())
>  		availableParamBuffers_.pop();
>
> +	while (!availableMainPathBuffers_.empty())
> +		availableMainPathBuffers_.pop();
> +
>  	paramBuffers_.clear();
>  	statBuffers_.clear();
> +	mainPathBuffers_.clear();
>
>  	std::vector<unsigned int> ids;
>  	for (IPABuffer &ipabuf : data->ipaBuffers_)
> @@ -919,6 +967,9 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
>  	if (stat_->releaseBuffers())
>  		LOG(RkISP1, Error) << "Failed to release stat buffers";
>
> +	if (useDewarper_)
> +		mainPath_.releaseBuffers();
> +
>  	return 0;
>  }
>
> @@ -961,6 +1012,14 @@ int PipelineHandlerRkISP1::start(Camera *camera, [[maybe_unused]] const ControlL
>  				<< "Failed to start statistics " << camera->id();
>  			return ret;
>  		}
> +
> +		if (useDewarper_) {
> +			ret = dewarper_->start();
> +			if (ret) {
> +				LOG(RkISP1, Error) << "Failed to start dewarper";

The error handling path should undo all the previous actions
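
For example (sketch only; the exact unwinding calls are an assumption,
meant to mirror the error paths earlier in start()):

	if (useDewarper_) {
		ret = dewarper_->start();
		if (ret) {
			LOG(RkISP1, Error) << "Failed to start dewarper";
			/* Undo what start() has already done. */
			stat_->streamOff();
			param_->streamOff();
			data->ipa_->stop();
			freeBuffers(camera);
			return ret;
		}
	}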

> +				return ret;
> +			}
> +		}
>  	}
>
>  	if (data->mainPath_->isEnabled()) {
> @@ -1015,6 +1074,9 @@ void PipelineHandlerRkISP1::stopDevice(Camera *camera)
>  		if (ret)
>  			LOG(RkISP1, Warning)
>  				<< "Failed to stop parameters for " << camera->id();
> +
> +		if (useDewarper_)
> +			dewarper_->stop();
>  	}
>
>  	ASSERT(data->queuedRequests_.empty());
> @@ -1045,6 +1107,25 @@ int PipelineHandlerRkISP1::queueRequestDevice(Camera *camera, Request *request)
>  					     info->paramBuffer->cookie());
>  	}
>
> +	const auto &crop = request->controls().get(controls::ScalerCrop);
> +	if (crop && useDewarper_) {
> +		Rectangle appliedRect = crop.value();
> +		int ret = dewarper_->setCrop(&data->mainPathStream_, &appliedRect);
> +		if (!ret) {
> +			if (appliedRect != crop.value()) {
> +				/*
> +				 * \todo How to handle these case?
> +				 * Do we aim for pixel perfect set rectangles?
> +				 */

I think this should be reported in metadata as what has actually been
applied, and the application left to decide what to do there
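
Something along these lines, i.e. record whatever was actually applied
and let dewarpBufferReady() report it through request metadata (sketch
only):

	int ret = dewarper_->setCrop(&data->mainPathStream_, &appliedRect);
	if (!ret)
		/* Store the rectangle that was actually applied; the
		 * application can compare it with what it requested. */
		info->scalerCrop = appliedRect;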

> +				LOG(RkISP1, Warning)
> +					<< "Applied rectangle " << appliedRect.toString()
> +					<< " differs from requested " << crop.value().toString();
> +			}
> +
> +			info->scalerCrop = appliedRect;
> +		}
> +	}
> +
>  	data->frame_++;
>
>  	return 0;
> @@ -1110,6 +1191,12 @@ int PipelineHandlerRkISP1::updateControls(RkISP1CameraData *data)
>  {
>  	ControlInfoMap::Map rkisp1Controls;
>
> +	if (dewarper_) {
> +		auto [minCrop, maxCrop] = dewarper_->getCropBounds(&data->mainPathStream_);
> +
> +		rkisp1Controls[&controls::ScalerCrop] = ControlInfo(minCrop, maxCrop, maxCrop);
> +	}
> +
>  	/* Add the IPA registered controls to list of camera controls. */
>  	for (const auto &ipaControl : data->ipaControls_)
>  		rkisp1Controls[ipaControl.first] = ipaControl.second;
> @@ -1173,6 +1260,7 @@ int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
>
>  bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
>  {
> +	std::shared_ptr<MediaDevice> dwpMediaDevice;
>  	const MediaPad *pad;
>
>  	DeviceMatch dm("rkisp1");
> @@ -1237,6 +1325,26 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
>  	stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
>  	param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);
>
> +	/* If dewarper is present, create its instance. */
> +	DeviceMatch dwp("dw100");
> +	dwp.add("dw100-source");
> +	dwp.add("dw100-sink");
> +
> +	dwpMediaDevice = enumerator->search(dwp);
> +	if (dwpMediaDevice) {
> +		dewarper_ = std::make_unique<ConverterDW100>(std::move(dwpMediaDevice));
> +		if (dewarper_->isValid()) {
> +			dewarper_->outputBufferReady.connect(
> +				this, &PipelineHandlerRkISP1::dewarpBufferReady);
> +
> +			LOG(RkISP1, Info) << "Using DW100 dewarper " << dewarper_->deviceNode();
> +		} else {
> +			LOG(RkISP1, Debug) << "Found DW100 dewarper " << dewarper_->deviceNode()
> +					   << " but invalid";
> +			dewarper_.reset();
> +		}
> +	}
> +
>  	/*
>  	 * Enumerate all sensors connected to the ISP and create one
>  	 * camera instance for each of them.
> @@ -1283,7 +1391,7 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
>  		return;
>
>  	const FrameMetadata &metadata = buffer->metadata();
> -	Request *request = buffer->request();
> +	Request *request = info->request;
>
>  	if (metadata.status != FrameMetadata::FrameCancelled) {
>  		/*
> @@ -1300,11 +1408,43 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
>  				data->delayedCtrls_->get(metadata.sequence);
>  			data->ipa_->processStatsBuffer(info->frame, 0, ctrls);
>  		}
> +

rogue blank line

>  	} else {
>  		if (isRaw_)
>  			info->metadataProcessed = true;
>  	}
>
> +	if (useDewarper_) {
> +		/*
> +		 * Queue input and output buffers to the dewarper. The output
> +		 * buffers for the dewarper are the buffers of the request, supplied
> +		 * by the application.
> +		 */
> +		int ret = dewarper_->queueBuffers(buffer, request->buffers());

What if buffer comes from the self path ?
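
One way to guard against that would be to only hand main path
completions to the dewarper, e.g. by checking the buffer against the
internal buffer recorded in the frame info (sketch, assuming
info->mainPathBuffer identifies the internal main path buffer):

	if (useDewarper_ && buffer == info->mainPathBuffer) {
		int ret = dewarper_->queueBuffers(buffer, request->buffers());
		if (ret < 0)
			LOG(RkISP1, Error) << "Cannot queue buffers to dewarper: "
					   << strerror(-ret);

		return;
	}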

> +		if (ret < 0)
> +			LOG(RkISP1, Error) << "Cannot queue buffers to dewarper: "
> +					   << strerror(-ret);
> +
> +		return;
> +	}
> +
> +	completeBuffer(request, buffer);
> +	tryCompleteRequest(info);
> +}
> +
> +void PipelineHandlerRkISP1::dewarpBufferReady(FrameBuffer *buffer)
> +{
> +	ASSERT(activeCamera_);
> +	RkISP1CameraData *data = cameraData(activeCamera_);
> +	Request *request = buffer->request();
> +
> +	RkISP1FrameInfo *info = data->frameInfo_.find(buffer->request());
> +	if (!info)
> +		return;
> +
> +	if (info->scalerCrop)
> +		request->metadata().set(controls::ScalerCrop, info->scalerCrop.value());
> +
>  	completeBuffer(request, buffer);
>  	tryCompleteRequest(info);
>  }
> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
> index c49017d1..c96ec1d6 100644
> --- a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
> +++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
> @@ -56,7 +56,7 @@ const std::map<PixelFormat, uint32_t> formatToMediaBus = {
>
>  RkISP1Path::RkISP1Path(const char *name, const Span<const PixelFormat> &formats,
>  		       const Size &minResolution, const Size &maxResolution)
> -	: name_(name), running_(false), formats_(formats),
> +	: name_(name), running_(false), internalBufs_(false), formats_(formats),
>  	  minResolution_(minResolution), maxResolution_(maxResolution),
>  	  link_(nullptr)
>  {
> @@ -402,10 +402,12 @@ int RkISP1Path::start()
>  	if (running_)
>  		return -EBUSY;
>
> -	/* \todo Make buffer count user configurable. */
> -	ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
> -	if (ret)
> -		return ret;
> +	if (!internalBufs_) {

Would you still need this if you exportBuffers() from the main path ?

> +		/* \todo Make buffer count user configurable. */
> +		ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
> +		if (ret)
> +			return ret;
> +	}
>
>  	ret = video_->streamOn();
>  	if (ret) {
> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.h b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
> index 08edefec..b7fa82d0 100644
> --- a/src/libcamera/pipeline/rkisp1/rkisp1_path.h
> +++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
> @@ -55,6 +55,19 @@ public:
>  		return video_->exportBuffers(bufferCount, buffers);
>  	}
>
> +	int allocateBuffers(unsigned int bufferCount,
> +			    std::vector<std::unique_ptr<FrameBuffer>> *buffers)
> +	{
> +		internalBufs_ = true;
> +		return video_->allocateBuffers(bufferCount, buffers);

What if allocateBuffers() fails ? You have set the internalBufs_ flag
before this call.
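
Setting the flag only once the allocation has succeeded would avoid
that, e.g. (sketch):

	int allocateBuffers(unsigned int bufferCount,
			    std::vector<std::unique_ptr<FrameBuffer>> *buffers)
	{
		int ret = video_->allocateBuffers(bufferCount, buffers);
		if (ret < 0)
			return ret;

		internalBufs_ = true;
		return ret;
	}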

> +	}
> +
> +	int releaseBuffers()
> +	{
> +		ASSERT(internalBufs_);
> +		return video_->releaseBuffers();
> +	}
> +

I think you should move these to the cpp file.

>  	int start();
>  	void stop();
>
> @@ -68,6 +81,7 @@ private:
>
>  	const char *name_;
>  	bool running_;
> +	bool internalBufs_;
>
>  	const Span<const PixelFormat> formats_;
>  	std::set<PixelFormat> streamFormats_;
> --
> 2.45.2
>
Umang Jain July 18, 2024, 5:22 a.m. UTC | #2
Hi Jacopo

On 17/07/24 6:40 pm, Jacopo Mondi wrote:
> Hi Umang
>
> On Wed, Jul 10, 2024 at 01:51:51AM GMT, Umang Jain wrote:
>> Plumb the ConverterDW100 converter in the rkisp1 pipeline handler.
>> If the dewarper is found, it is instantiated and buffers are exported
>> from it, instead of RkISP1Path. Internal buffers are allocated for the
>> RkISP1Path in case where dewarper is going to be used.
>>
>> The RKISP1 pipeline handler now supports scaler crop control through
>> ConverterDW100. Register the ScalerCrop control for the cameras created
>> in the RKISP1 pipeline handler.
>>
>> Signed-off-by: Umang Jain <umang.jain@ideasonboard.com>
>> ---
>>   src/libcamera/pipeline/rkisp1/rkisp1.cpp      | 148 +++++++++++++++++-
>>   src/libcamera/pipeline/rkisp1/rkisp1_path.cpp |  12 +-
>>   src/libcamera/pipeline/rkisp1/rkisp1_path.h   |  14 ++
>>   3 files changed, 165 insertions(+), 9 deletions(-)
>>
>> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
>> index bfc44239..881e10e1 100644
>> --- a/src/libcamera/pipeline/rkisp1/rkisp1.cpp
>> +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
>> @@ -8,9 +8,11 @@
>>   #include <algorithm>
>>   #include <array>
>>   #include <iomanip>
>> +#include <map>
>>   #include <memory>
>>   #include <numeric>
>>   #include <queue>
>> +#include <vector>
>>
>>   #include <linux/media-bus-format.h>
>>   #include <linux/rkisp1-config.h>
>> @@ -33,6 +35,7 @@
>>
>>   #include "libcamera/internal/camera.h"
>>   #include "libcamera/internal/camera_sensor.h"
>> +#include "libcamera/internal/converter/converter_dw100.h"
>>   #include "libcamera/internal/delayed_controls.h"
>>   #include "libcamera/internal/device_enumerator.h"
>>   #include "libcamera/internal/framebuffer.h"
>> @@ -62,6 +65,8 @@ struct RkISP1FrameInfo {
>>
>>   	bool paramDequeued;
>>   	bool metadataProcessed;
>> +
>> +	std::optional<Rectangle> scalerCrop;
>>   };
>>
>>   class RkISP1Frames
>> @@ -181,6 +186,7 @@ private:
>>   	void bufferReady(FrameBuffer *buffer);
>>   	void paramReady(FrameBuffer *buffer);
>>   	void statReady(FrameBuffer *buffer);
>> +	void dewarpBufferReady(FrameBuffer *buffer);
>>   	void frameStart(uint32_t sequence);
>>
>>   	int allocateBuffers(Camera *camera);
>> @@ -200,6 +206,13 @@ private:
>>   	RkISP1MainPath mainPath_;
>>   	RkISP1SelfPath selfPath_;
>>
>> +	std::unique_ptr<ConverterDW100> dewarper_;
>> +	bool useDewarper_;
>> +
>> +	/* Internal buffers used when dewarper is being used */
>> +	std::vector<std::unique_ptr<FrameBuffer>> mainPathBuffers_;
>> +	std::queue<FrameBuffer *> availableMainPathBuffers_;
>> +
>>   	std::vector<std::unique_ptr<FrameBuffer>> paramBuffers_;
>>   	std::vector<std::unique_ptr<FrameBuffer>> statBuffers_;
>>   	std::queue<FrameBuffer *> availableParamBuffers_;
>> @@ -222,6 +235,8 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
>>
>>   	FrameBuffer *paramBuffer = nullptr;
>>   	FrameBuffer *statBuffer = nullptr;
>> +	FrameBuffer *mainPathBuffer = nullptr;
>> +	FrameBuffer *selfPathBuffer = nullptr;
>>
>>   	if (!isRaw) {
>>   		if (pipe_->availableParamBuffers_.empty()) {
>> @@ -239,10 +254,16 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
>>
>>   		statBuffer = pipe_->availableStatBuffers_.front();
>>   		pipe_->availableStatBuffers_.pop();
>> +
>> +		if (pipe_->useDewarper_) {
>> +			mainPathBuffer = pipe_->availableMainPathBuffers_.front();
> Can we exhaust the availableMainPathBuffers_ queue? Should this be
> checked for validity ?

It seems this rule should then be applied to all the available*Buffers_
queues in the rkisp1 pipeline handler?

>
>> +			pipe_->availableMainPathBuffers_.pop();
>> +		}
>>   	}
>>
>> -	FrameBuffer *mainPathBuffer = request->findBuffer(&data->mainPathStream_);
>> -	FrameBuffer *selfPathBuffer = request->findBuffer(&data->selfPathStream_);
>> +	if (!mainPathBuffer)
>> +		mainPathBuffer = request->findBuffer(&data->mainPathStream_);
>> +	selfPathBuffer = request->findBuffer(&data->selfPathStream_);
>>
>>   	RkISP1FrameInfo *info = new RkISP1FrameInfo;
>>
>> @@ -268,6 +289,7 @@ int RkISP1Frames::destroy(unsigned int frame)
>>
>>   	pipe_->availableParamBuffers_.push(info->paramBuffer);
>>   	pipe_->availableStatBuffers_.push(info->statBuffer);
>> +	pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
>>
>>   	frameInfo_.erase(info->frame);
>>
>> @@ -283,6 +305,7 @@ void RkISP1Frames::clear()
>>
>>   		pipe_->availableParamBuffers_.push(info->paramBuffer);
>>   		pipe_->availableStatBuffers_.push(info->statBuffer);
>> +		pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
>>
>>   		delete info;
>>   	}
>> @@ -607,7 +630,7 @@ CameraConfiguration::Status RkISP1CameraConfiguration::validate()
>>    */
>>
>>   PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
>> -	: PipelineHandler(manager), hasSelfPath_(true)
>> +	: PipelineHandler(manager), hasSelfPath_(true), useDewarper_(false)
>>   {
>>   }
>>
>> @@ -785,12 +808,19 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
>>   		<< " crop " << rect;
>>
>>   	std::map<unsigned int, IPAStream> streamConfig;
>> +	std::vector<std::reference_wrapper<StreamConfiguration>> outputCfgs;
>>
>>   	for (const StreamConfiguration &cfg : *config) {
>>   		if (cfg.stream() == &data->mainPathStream_) {
>>   			ret = mainPath_.configure(cfg, format);
>>   			streamConfig[0] = IPAStream(cfg.pixelFormat,
>>   						    cfg.size);
>> +			/* Configure dewarp */
>> +			if (dewarper_ && !isRaw_) {
>> +				outputCfgs.push_back(const_cast<StreamConfiguration &>(cfg));
> Why a vector, can you have more than one ?

The converter can have more than one output, yes.
>
>> +				ret = dewarper_->configure(cfg, outputCfgs);
> Ah, the need to have a vector comes from the configure() signature
>
> Seems like
> 				ret = dewarper_->configure(cfg, { cfg });
>
> would do

const is a problem here I think

>
>> +				useDewarper_ = ret ? false : true;
>> +			}
>>   		} else if (hasSelfPath_) {
>>   			ret = selfPath_.configure(cfg, format);
>>   			streamConfig[1] = IPAStream(cfg.pixelFormat,
>> @@ -839,6 +869,9 @@ int PipelineHandlerRkISP1::exportFrameBuffers([[maybe_unused]] Camera *camera, S
>>   	RkISP1CameraData *data = cameraData(camera);
>>   	unsigned int count = stream->configuration().bufferCount;
>>
>> +	if (useDewarper_)
>> +		return dewarper_->exportBuffers(&data->mainPathStream_, count, buffers);
>> +
> Unconditionally ? Even if the stream is the selfPath one ?

The dewarper only works with mainPath_, I believe.

The i.MX8MP doesn't have selfPath as far as I know? Paul, can you 
correct me on this - I think we had a discussion regarding that.

>
>>   	if (stream == &data->mainPathStream_)
>>   		return mainPath_.exportBuffers(count, buffers);
>>   	else if (hasSelfPath_ && stream == &data->selfPathStream_)
>> @@ -866,6 +899,16 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
>>   		ret = stat_->allocateBuffers(maxCount, &statBuffers_);
>>   		if (ret < 0)
>>   			goto error;
>> +
>> +		/* If the dewarper is being used, allocate internal buffers for ISP */
>> +		if (useDewarper_) {
>> +			ret = mainPath_.allocateBuffers(maxCount, &mainPathBuffers_);
>> +			if (ret < 0)
>> +				goto error;
> Is this right ?
>
> It seems the model here is to allocate buffers in the main path and
> import them in the dewarper.

The model here is simply to export the buffers from the dewarper and
allocate internal buffers for the RkISP1Path.

The internal buffers allocated here (availableMainPathBuffers_) are then
queued to the dewarper.

>
> allocateBuffers() creates and exports the buffers but sets the video
> device in MMAP mode.
>
> I would be expecting instead the main path to
> 1) export buffers
> 2) import them back to initialize the queue in DMABUF mode
>
>  From there buffers dequeued from the main path are queued to the
> dewarper without any need to be mapped into a userspace accessible
> address.
>
> Am I confused maybe ?

Now I'm confused by the above strategy. Do you mean to export buffers
from the mainPath and have internal buffers allocated in the dewarper?

When you say,

2) import them back to initialize the queue in DMABUF mode

Import them back where?

But it's certainly a worthwhile idea to have them exported from the main
path and also queued back to it and to the dewarper. That would require
some experimentation on my side.

>
>
>> +
>> +			for (std::unique_ptr<FrameBuffer> &buffer : mainPathBuffers_)
>> +				availableMainPathBuffers_.push(buffer.get());
>> +		}
>>   	}
>>
>>   	for (std::unique_ptr<FrameBuffer> &buffer : paramBuffers_) {
>> @@ -889,6 +932,7 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
>>   error:
>>   	paramBuffers_.clear();
>>   	statBuffers_.clear();
>> +	mainPathBuffers_.clear();
>>
>>   	return ret;
>>   }
>> @@ -903,8 +947,12 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
>>   	while (!availableParamBuffers_.empty())
>>   		availableParamBuffers_.pop();
>>
>> +	while (!availableMainPathBuffers_.empty())
>> +		availableMainPathBuffers_.pop();
>> +
>>   	paramBuffers_.clear();
>>   	statBuffers_.clear();
>> +	mainPathBuffers_.clear();
>>
>>   	std::vector<unsigned int> ids;
>>   	for (IPABuffer &ipabuf : data->ipaBuffers_)
>> @@ -919,6 +967,9 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
>>   	if (stat_->releaseBuffers())
>>   		LOG(RkISP1, Error) << "Failed to release stat buffers";
>>
>> +	if (useDewarper_)
>> +		mainPath_.releaseBuffers();
>> +
>>   	return 0;
>>   }
>>
>> @@ -961,6 +1012,14 @@ int PipelineHandlerRkISP1::start(Camera *camera, [[maybe_unused]] const ControlL
>>   				<< "Failed to start statistics " << camera->id();
>>   			return ret;
>>   		}
>> +
>> +		if (useDewarper_) {
>> +			ret = dewarper_->start();
>> +			if (ret) {
>> +				LOG(RkISP1, Error) << "Failed to start dewarper";
> The error handling path should undo all the previous actions
>
>> +				return ret;
>> +			}
>> +		}
>>   	}
>>
>>   	if (data->mainPath_->isEnabled()) {
>> @@ -1015,6 +1074,9 @@ void PipelineHandlerRkISP1::stopDevice(Camera *camera)
>>   		if (ret)
>>   			LOG(RkISP1, Warning)
>>   				<< "Failed to stop parameters for " << camera->id();
>> +
>> +		if (useDewarper_)
>> +			dewarper_->stop();
>>   	}
>>
>>   	ASSERT(data->queuedRequests_.empty());
>> @@ -1045,6 +1107,25 @@ int PipelineHandlerRkISP1::queueRequestDevice(Camera *camera, Request *request)
>>   					     info->paramBuffer->cookie());
>>   	}
>>
>> +	const auto &crop = request->controls().get(controls::ScalerCrop);
>> +	if (crop && useDewarper_) {
>> +		Rectangle appliedRect = crop.value();
>> +		int ret = dewarper_->setCrop(&data->mainPathStream_, &appliedRect);
>> +		if (!ret) {
>> +			if (appliedRect != crop.value()) {
>> +				/*
>> +				 * \todo How to handle these case?
>> +				 * Do we aim for pixel perfect set rectangles?
>> +				 */
> I think this should be reported in metadata as it has been applied and
> let the application decide what to do there

ack
>
>> +				LOG(RkISP1, Warning)
>> +					<< "Applied rectangle " << appliedRect.toString()
>> +					<< " differs from requested " << crop.value().toString();
>> +			}
>> +
>> +			info->scalerCrop = appliedRect;
>> +		}
>> +	}
>> +
>>   	data->frame_++;
>>
>>   	return 0;
>> @@ -1110,6 +1191,12 @@ int PipelineHandlerRkISP1::updateControls(RkISP1CameraData *data)
>>   {
>>   	ControlInfoMap::Map rkisp1Controls;
>>
>> +	if (dewarper_) {
>> +		auto [minCrop, maxCrop] = dewarper_->getCropBounds(&data->mainPathStream_);
>> +
>> +		rkisp1Controls[&controls::ScalerCrop] = ControlInfo(minCrop, maxCrop, maxCrop);
>> +	}
>> +
>>   	/* Add the IPA registered controls to list of camera controls. */
>>   	for (const auto &ipaControl : data->ipaControls_)
>>   		rkisp1Controls[ipaControl.first] = ipaControl.second;
>> @@ -1173,6 +1260,7 @@ int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
>>
>>   bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
>>   {
>> +	std::shared_ptr<MediaDevice> dwpMediaDevice;
>>   	const MediaPad *pad;
>>
>>   	DeviceMatch dm("rkisp1");
>> @@ -1237,6 +1325,26 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
>>   	stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
>>   	param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);
>>
>> +	/* If dewarper is present, create its instance. */
>> +	DeviceMatch dwp("dw100");
>> +	dwp.add("dw100-source");
>> +	dwp.add("dw100-sink");
>> +
>> +	dwpMediaDevice = enumerator->search(dwp);
>> +	if (dwpMediaDevice) {
>> +		dewarper_ = std::make_unique<ConverterDW100>(std::move(dwpMediaDevice));
>> +		if (dewarper_->isValid()) {
>> +			dewarper_->outputBufferReady.connect(
>> +				this, &PipelineHandlerRkISP1::dewarpBufferReady);
>> +
>> +			LOG(RkISP1, Info) << "Using DW100 dewarper " << dewarper_->deviceNode();
>> +		} else {
>> +			LOG(RkISP1, Debug) << "Found DW100 dewarper " << dewarper_->deviceNode()
>> +					   << " but invalid";
>> +			dewarper_.reset();
>> +		}
>> +	}
>> +
>>   	/*
>>   	 * Enumerate all sensors connected to the ISP and create one
>>   	 * camera instance for each of them.
>> @@ -1283,7 +1391,7 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
>>   		return;
>>
>>   	const FrameMetadata &metadata = buffer->metadata();
>> -	Request *request = buffer->request();
>> +	Request *request = info->request;
>>
>>   	if (metadata.status != FrameMetadata::FrameCancelled) {
>>   		/*
>> @@ -1300,11 +1408,43 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
>>   				data->delayedCtrls_->get(metadata.sequence);
>>   			data->ipa_->processStatsBuffer(info->frame, 0, ctrls);
>>   		}
>> +
> rogue blank line
>
>>   	} else {
>>   		if (isRaw_)
>>   			info->metadataProcessed = true;
>>   	}
>>
>> +	if (useDewarper_) {
>> +		/*
>> +		 * Queue input and output buffers to the dewarper. The output
>> +		 * buffers for the dewarper are the buffers of the request, supplied
>> +		 * by the application.
>> +		 */
>> +		int ret = dewarper_->queueBuffers(buffer, request->buffers());
> What if buffer comes from the self path ?

I am not yet aware of an i.MX8MP platform where the dewarper is present
along with both the main and self paths.

>
>> +		if (ret < 0)
>> +			LOG(RkISP1, Error) << "Cannot queue buffers to dewarper: "
>> +					   << strerror(-ret);
>> +
>> +		return;
>> +	}
>> +
>> +	completeBuffer(request, buffer);
>> +	tryCompleteRequest(info);
>> +}
>> +
>> +void PipelineHandlerRkISP1::dewarpBufferReady(FrameBuffer *buffer)
>> +{
>> +	ASSERT(activeCamera_);
>> +	RkISP1CameraData *data = cameraData(activeCamera_);
>> +	Request *request = buffer->request();
>> +
>> +	RkISP1FrameInfo *info = data->frameInfo_.find(buffer->request());
>> +	if (!info)
>> +		return;
>> +
>> +	if (info->scalerCrop)
>> +		request->metadata().set(controls::ScalerCrop, info->scalerCrop.value());
>> +
>>   	completeBuffer(request, buffer);
>>   	tryCompleteRequest(info);
>>   }
>> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
>> index c49017d1..c96ec1d6 100644
>> --- a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
>> +++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
>> @@ -56,7 +56,7 @@ const std::map<PixelFormat, uint32_t> formatToMediaBus = {
>>
>>   RkISP1Path::RkISP1Path(const char *name, const Span<const PixelFormat> &formats,
>>   		       const Size &minResolution, const Size &maxResolution)
>> -	: name_(name), running_(false), formats_(formats),
>> +	: name_(name), running_(false), internalBufs_(false), formats_(formats),
>>   	  minResolution_(minResolution), maxResolution_(maxResolution),
>>   	  link_(nullptr)
>>   {
>> @@ -402,10 +402,12 @@ int RkISP1Path::start()
>>   	if (running_)
>>   		return -EBUSY;
>>
>> -	/* \todo Make buffer count user configurable. */
>> -	ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
>> -	if (ret)
>> -		return ret;
>> +	if (!internalBufs_) {
> Would you still need this if you exportBuffers() from the main path ?
>
>> +		/* \todo Make buffer count user configurable. */
>> +		ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
>> +		if (ret)
>> +			return ret;
>> +	}
>>
>>   	ret = video_->streamOn();
>>   	if (ret) {
>> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.h b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
>> index 08edefec..b7fa82d0 100644
>> --- a/src/libcamera/pipeline/rkisp1/rkisp1_path.h
>> +++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
>> @@ -55,6 +55,19 @@ public:
>>   		return video_->exportBuffers(bufferCount, buffers);
>>   	}
>>
>> +	int allocateBuffers(unsigned int bufferCount,
>> +			    std::vector<std::unique_ptr<FrameBuffer>> *buffers)
>> +	{
>> +		internalBufs_ = true;
>> +		return video_->allocateBuffers(bufferCount, buffers);
> What if allocateBuffers() fails ? You have set the internalBufs_ flag
> before this call.
>
>> +	}
>> +
>> +	int releaseBuffers()
>> +	{
>> +		ASSERT(internalBufs_);
>> +		return video_->releaseBuffers();
>> +	}
>> +
> I think you should move these to the cpp file.
>
>>   	int start();
>>   	void stop();
>>
>> @@ -68,6 +81,7 @@ private:
>>
>>   	const char *name_;
>>   	bool running_;
>> +	bool internalBufs_;
>>
>>   	const Span<const PixelFormat> formats_;
>>   	std::set<PixelFormat> streamFormats_;
>> --
>> 2.45.2
>>
Jacopo Mondi July 18, 2024, 9:54 a.m. UTC | #3
Hi Umang

On Thu, Jul 18, 2024 at 10:52:35AM GMT, Umang Jain wrote:
> Hi Jacopo
>
> On 17/07/24 6:40 pm, Jacopo Mondi wrote:
> > Hi Umang
> >
> > On Wed, Jul 10, 2024 at 01:51:51AM GMT, Umang Jain wrote:
> > > Plumb the ConverterDW100 converter in the rkisp1 pipeline handler.
> > > If the dewarper is found, it is instantiated and buffers are exported
> > > from it, instead of RkISP1Path. Internal buffers are allocated for the
> > > RkISP1Path in case where dewarper is going to be used.
> > >
> > > The RKISP1 pipeline handler now supports scaler crop control through
> > > ConverterDW100. Register the ScalerCrop control for the cameras created
> > > in the RKISP1 pipeline handler.
> > >
> > > Signed-off-by: Umang Jain <umang.jain@ideasonboard.com>
> > > ---
> > >   src/libcamera/pipeline/rkisp1/rkisp1.cpp      | 148 +++++++++++++++++-
> > >   src/libcamera/pipeline/rkisp1/rkisp1_path.cpp |  12 +-
> > >   src/libcamera/pipeline/rkisp1/rkisp1_path.h   |  14 ++
> > >   3 files changed, 165 insertions(+), 9 deletions(-)
> > >
> > > diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > > index bfc44239..881e10e1 100644
> > > --- a/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > > +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > > @@ -8,9 +8,11 @@
> > >   #include <algorithm>
> > >   #include <array>
> > >   #include <iomanip>
> > > +#include <map>
> > >   #include <memory>
> > >   #include <numeric>
> > >   #include <queue>
> > > +#include <vector>
> > >
> > >   #include <linux/media-bus-format.h>
> > >   #include <linux/rkisp1-config.h>
> > > @@ -33,6 +35,7 @@
> > >
> > >   #include "libcamera/internal/camera.h"
> > >   #include "libcamera/internal/camera_sensor.h"
> > > +#include "libcamera/internal/converter/converter_dw100.h"
> > >   #include "libcamera/internal/delayed_controls.h"
> > >   #include "libcamera/internal/device_enumerator.h"
> > >   #include "libcamera/internal/framebuffer.h"
> > > @@ -62,6 +65,8 @@ struct RkISP1FrameInfo {
> > >
> > >   	bool paramDequeued;
> > >   	bool metadataProcessed;
> > > +
> > > +	std::optional<Rectangle> scalerCrop;
> > >   };
> > >
> > >   class RkISP1Frames
> > > @@ -181,6 +186,7 @@ private:
> > >   	void bufferReady(FrameBuffer *buffer);
> > >   	void paramReady(FrameBuffer *buffer);
> > >   	void statReady(FrameBuffer *buffer);
> > > +	void dewarpBufferReady(FrameBuffer *buffer);
> > >   	void frameStart(uint32_t sequence);
> > >
> > >   	int allocateBuffers(Camera *camera);
> > > @@ -200,6 +206,13 @@ private:
> > >   	RkISP1MainPath mainPath_;
> > >   	RkISP1SelfPath selfPath_;
> > >
> > > +	std::unique_ptr<ConverterDW100> dewarper_;
> > > +	bool useDewarper_;
> > > +
> > > +	/* Internal buffers used when dewarper is being used */
> > > +	std::vector<std::unique_ptr<FrameBuffer>> mainPathBuffers_;
> > > +	std::queue<FrameBuffer *> availableMainPathBuffers_;
> > > +
> > >   	std::vector<std::unique_ptr<FrameBuffer>> paramBuffers_;
> > >   	std::vector<std::unique_ptr<FrameBuffer>> statBuffers_;
> > >   	std::queue<FrameBuffer *> availableParamBuffers_;
> > > @@ -222,6 +235,8 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
> > >
> > >   	FrameBuffer *paramBuffer = nullptr;
> > >   	FrameBuffer *statBuffer = nullptr;
> > > +	FrameBuffer *mainPathBuffer = nullptr;
> > > +	FrameBuffer *selfPathBuffer = nullptr;
> > >
> > >   	if (!isRaw) {
> > >   		if (pipe_->availableParamBuffers_.empty()) {
> > > @@ -239,10 +254,16 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
> > >
> > >   		statBuffer = pipe_->availableStatBuffers_.front();
> > >   		pipe_->availableStatBuffers_.pop();
> > > +
> > > +		if (pipe_->useDewarper_) {
> > > +			mainPathBuffer = pipe_->availableMainPathBuffers_.front();
> > Can we exhaust the availableMainPathBuffers_ queue? Should this be
> > checked for validity ?
>
> Seems rule to be applied for all available*Buffers_ in rkisp1 pipeline
> handler?
>

Should they all be fixed then ? :)

> >
> > > +			pipe_->availableMainPathBuffers_.pop();
> > > +		}
> > >   	}
> > >
> > > -	FrameBuffer *mainPathBuffer = request->findBuffer(&data->mainPathStream_);
> > > -	FrameBuffer *selfPathBuffer = request->findBuffer(&data->selfPathStream_);
> > > +	if (!mainPathBuffer)
> > > +		mainPathBuffer = request->findBuffer(&data->mainPathStream_);
> > > +	selfPathBuffer = request->findBuffer(&data->selfPathStream_);
> > >
> > >   	RkISP1FrameInfo *info = new RkISP1FrameInfo;
> > >
> > > @@ -268,6 +289,7 @@ int RkISP1Frames::destroy(unsigned int frame)
> > >
> > >   	pipe_->availableParamBuffers_.push(info->paramBuffer);
> > >   	pipe_->availableStatBuffers_.push(info->statBuffer);
> > > +	pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
> > >
> > >   	frameInfo_.erase(info->frame);
> > >
> > > @@ -283,6 +305,7 @@ void RkISP1Frames::clear()
> > >
> > >   		pipe_->availableParamBuffers_.push(info->paramBuffer);
> > >   		pipe_->availableStatBuffers_.push(info->statBuffer);
> > > +		pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
> > >
> > >   		delete info;
> > >   	}
> > > @@ -607,7 +630,7 @@ CameraConfiguration::Status RkISP1CameraConfiguration::validate()
> > >    */
> > >
> > >   PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
> > > -	: PipelineHandler(manager), hasSelfPath_(true)
> > > +	: PipelineHandler(manager), hasSelfPath_(true), useDewarper_(false)
> > >   {
> > >   }
> > >
> > > @@ -785,12 +808,19 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
> > >   		<< " crop " << rect;
> > >
> > >   	std::map<unsigned int, IPAStream> streamConfig;
> > > +	std::vector<std::reference_wrapper<StreamConfiguration>> outputCfgs;
> > >
> > >   	for (const StreamConfiguration &cfg : *config) {
> > >   		if (cfg.stream() == &data->mainPathStream_) {
> > >   			ret = mainPath_.configure(cfg, format);
> > >   			streamConfig[0] = IPAStream(cfg.pixelFormat,
> > >   						    cfg.size);
> > > +			/* Configure dewarp */
> > > +			if (dewarper_ && !isRaw_) {
> > > +				outputCfgs.push_back(const_cast<StreamConfiguration &>(cfg));
> > Why a vector, can you have more than one ?
>
> converter can have more than one outputs yes

Maybe in the general case. Here it doesn't seem you can have more than one
config that gets applied to the dewarper

> >
> > > +				ret = dewarper_->configure(cfg, outputCfgs);
> > Ah, the need to have a vector comes from the configure() signature
> >
> > Seems like
> > 				ret = dewarper_->configure(cfg, { cfg });
> >
> > would do
>
> const is a problem here I think
>

It compiles here; I haven't sent it through the CI loop, though.

Have you had any compiler error with the above ?

> >
> > > +				useDewarper_ = ret ? false : true;
> > > +			}
> > >   		} else if (hasSelfPath_) {
> > >   			ret = selfPath_.configure(cfg, format);
> > >   			streamConfig[1] = IPAStream(cfg.pixelFormat,
> > > @@ -839,6 +869,9 @@ int PipelineHandlerRkISP1::exportFrameBuffers([[maybe_unused]] Camera *camera, S
> > >   	RkISP1CameraData *data = cameraData(camera);
> > >   	unsigned int count = stream->configuration().bufferCount;
> > >
> > > +	if (useDewarper_)
> > > +		return dewarper_->exportBuffers(&data->mainPathStream_, count, buffers);
> > > +
> > Unconditionally ? Even if the stream is the selfPath one ?
>
> dewarper only works with mainPath_ I believe.

Is it a pipeline handler design choice or an HW limitation ?

>
> The i.MX8MP doesn't have selfPath as far as I know? Paul, can you correct me
> on this - I think we had a discussion regarding that.
>

8mp doesn't have a self path
rkisp doesn't have a dewarper

so this seems to be safe -for now-

> >
> > >   	if (stream == &data->mainPathStream_)
> > >   		return mainPath_.exportBuffers(count, buffers);
> > >   	else if (hasSelfPath_ && stream == &data->selfPathStream_)
> > > @@ -866,6 +899,16 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
> > >   		ret = stat_->allocateBuffers(maxCount, &statBuffers_);
> > >   		if (ret < 0)
> > >   			goto error;
> > > +
> > > +		/* If the dewarper is being used, allocate internal buffers for ISP */
> > > +		if (useDewarper_) {
> > > +			ret = mainPath_.allocateBuffers(maxCount, &mainPathBuffers_);
> > > +			if (ret < 0)
> > > +				goto error;
> > Is this right ?
> >
> > It seems the model here is to allocate buffers in the main path and
> > import them in the dewarper.
>
> The model here is to simply export the buffers from dewarper and allocate
> internal buffers for RkISP1Path

Yes and I wonder if it is correct.

I'm not talking about the buffers exported from the dewarper to the
application (the ones created by exportFrameBuffers(), which only serve
the FrameBufferAllocator in providing FrameBuffers to the
application).

I'm talking about the buffers between the main path and the dewarper.

>
> These internal buffers allocated (availableMainPathBuffers_), are then
> queued to the dewarper.
>
> >
> > allocateBuffers() creates and exports the buffers but sets the video
> > device in MMAP mode.
> >
> > I would be expecting instead the main path to
> > 1) export buffers
> > 2) import them back to initialize the queue in DMABUF mode
> >
> >  From there buffers dequeued from the main path are queued to the
> > dewarper without any need to be mapped into a userspace accessible
> > address.
> >
> > Am I confused maybe ?
>
> Now I'm confused with the above strategy. Do you mean that export buffers
> from mainPath and have internal buffers allocated in the dewarper?

I mean allocating memory in the main path and passing it to the
dewarper as dmabufs, which is what allocateBuffers() does, except that
it initializes the queue in MMAP mode, whereas you would only need DMABUF.

>
> When you say,
>
> 2) import them back to initialize the queue in DMABUF mode
>
> Import them back where?

In the mainpath itself. See how the CIO2 does that.

int CIO2Device::start()
{
	int ret = output_->exportBuffers(kBufferCount, &buffers_);
	if (ret < 0)
		return ret;

	ret = output_->importBuffers(kBufferCount);
	if (ret)
		LOG(IPU3, Error) << "Failed to import CIO2 buffers";


The main idea is that both allocateBuffers() and exportBuffers()
actually allocate memory on the device on which those functions are
called. The difference is the memory type used by the videobuf2 queue.
importBuffers() instead does not allocate memory on the device but
initializes its memory type to DMABUF and prepares it to receive
queueBuffer() calls.

With allocateBuffers() the memory type is set to MMAP, with
exportBuffers() is not initialized, so you have to later call
either allocateBuffers() or importBuffers() to initialize it.

As V4L2M2MConverter imports buffers at start() time:

int V4L2M2MConverter::V4L2M2MStream::start()
{
	int ret = m2m_->output()->importBuffers(inputBufferCount_);
	if (ret < 0)
		return ret;

	ret = m2m_->capture()->importBuffers(outputBufferCount_);
	if (ret < 0) {
		stop();
		return ret;
	}

I wonder if it is necessary to set the main path in MMAP mode, or
whether it can operate in DMABUF mode only, by combining exportBuffers() +
importBuffers(). To be honest, the implications in videobuf2 of using
one memory type or the other are not 100% clear to me yet.

Reading the V4L2VideoDevice documentation, however, allocateBuffers()
seems to be the preferred way to implement internal buffer pools
between video devices, so maybe I'm totally off-track here

 *
 * The V4L2VideoDevice class supports the V4L2 MMAP and DMABUF memory types:
 *
 * - The allocateBuffers() function wraps buffer allocation with the V4L2 MMAP
 *   memory type. It requests buffers from the driver, allocating the
 *   corresponding memory, and exports them as a set of FrameBuffer objects.
 *   Upon successful return the driver's internal buffer management is
 *   initialized in MMAP mode, and the video device is ready to accept
 *   queueBuffer() calls.
 *
 *   This is the most traditional V4L2 buffer management, and is mostly useful
 *   to support internal buffer pools in pipeline handlers, either for CPU
 *   consumption (such as statistics or parameters pools), or for internal
 *   image buffers shared between devices.
 *
 * - The exportBuffers() function operates similarly to allocateBuffers(), but
 *   leaves the driver's internal buffer management uninitialized. It uses the
 *   V4L2 buffer orphaning support to allocate buffers with the MMAP method,
 *   export them as a set of FrameBuffer objects, and reset the driver's
 *   internal buffer management. The video device shall be initialized with
 *   importBuffers() or allocateBuffers() before it can accept queueBuffer()
 *   calls. The exported buffers are directly usable with any V4L2 video device
 *   in DMABUF mode, or with other dmabuf importers.
 *
 *   This method is mostly useful to implement buffer allocation helpers or to
 *   allocate ancillary buffers, when a V4L2 video device is used in DMABUF
 *   mode but no other source of buffers is available. An example use case
 *   would be allocation of scratch buffers to be used in case of buffer
 *   underruns on a video device that is otherwise supplied with external
 *   buffers.
 *
 * - The importBuffers() function initializes the driver's buffer management to
 *   import buffers in DMABUF mode. It requests buffers from the driver, but
 *   doesn't allocate memory. Upon successful return, the video device is ready
 *   to accept queueBuffer() calls. The buffers to be imported are provided to
 *   queueBuffer(), and may be supplied externally, or come from a previous
 *   exportBuffers() call.
 *
 *   This is the usual buffers initialization method for video devices whose
 *   buffers are exposed outside of libcamera. It is also typically used on one
 *   of the two video device that participate in buffer sharing inside
 *   pipelines, the other video device typically using allocateBuffers().
 *
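
In other words, the internal pool for the main path could be created
with something like the following on the main path video device (sketch
only, to illustrate the exportBuffers() + importBuffers() combination
being discussed):

	std::vector<std::unique_ptr<FrameBuffer>> buffers;

	/* Allocate and export the pool as dmabufs, leaving the queue's
	 * memory type uninitialized... */
	int ret = video_->exportBuffers(RKISP1_BUFFER_COUNT, &buffers);
	if (ret < 0)
		return ret;

	/* ...then initialize the queue in DMABUF mode so the exported
	 * buffers can be queued back to the main path. */
	ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
	if (ret)
		return ret;

The dewarper side would keep importing buffers at start() time, as
V4L2M2MConverter already does.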

>
> But certainly it's a worth while idea to have them exported from main path
> and also queue to it the dewarper. Would require some experimentation from
> my side.
>
> >
> >
> > > +
> > > +			for (std::unique_ptr<FrameBuffer> &buffer : mainPathBuffers_)
> > > +				availableMainPathBuffers_.push(buffer.get());
> > > +		}
> > >   	}
> > >
> > >   	for (std::unique_ptr<FrameBuffer> &buffer : paramBuffers_) {
> > > @@ -889,6 +932,7 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
> > >   error:
> > >   	paramBuffers_.clear();
> > >   	statBuffers_.clear();
> > > +	mainPathBuffers_.clear();
> > >
> > >   	return ret;
> > >   }
> > > @@ -903,8 +947,12 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
> > >   	while (!availableParamBuffers_.empty())
> > >   		availableParamBuffers_.pop();
> > >
> > > +	while (!availableMainPathBuffers_.empty())
> > > +		availableMainPathBuffers_.pop();
> > > +
> > >   	paramBuffers_.clear();
> > >   	statBuffers_.clear();
> > > +	mainPathBuffers_.clear();
> > >
> > >   	std::vector<unsigned int> ids;
> > >   	for (IPABuffer &ipabuf : data->ipaBuffers_)
> > > @@ -919,6 +967,9 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
> > >   	if (stat_->releaseBuffers())
> > >   		LOG(RkISP1, Error) << "Failed to release stat buffers";
> > >
> > > +	if (useDewarper_)
> > > +		mainPath_.releaseBuffers();
> > > +
> > >   	return 0;
> > >   }
> > >
> > > @@ -961,6 +1012,14 @@ int PipelineHandlerRkISP1::start(Camera *camera, [[maybe_unused]] const ControlL
> > >   				<< "Failed to start statistics " << camera->id();
> > >   			return ret;
> > >   		}
> > > +
> > > +		if (useDewarper_) {
> > > +			ret = dewarper_->start();
> > > +			if (ret) {
> > > +				LOG(RkISP1, Error) << "Failed to start dewarper";
> > The error handling path should undo all the previous actions
> >
> > > +				return ret;
> > > +			}
> > > +		}
> > >   	}
> > >
> > >   	if (data->mainPath_->isEnabled()) {
> > > @@ -1015,6 +1074,9 @@ void PipelineHandlerRkISP1::stopDevice(Camera *camera)
> > >   		if (ret)
> > >   			LOG(RkISP1, Warning)
> > >   				<< "Failed to stop parameters for " << camera->id();
> > > +
> > > +		if (useDewarper_)
> > > +			dewarper_->stop();
> > >   	}
> > >
> > >   	ASSERT(data->queuedRequests_.empty());
> > > @@ -1045,6 +1107,25 @@ int PipelineHandlerRkISP1::queueRequestDevice(Camera *camera, Request *request)
> > >   					     info->paramBuffer->cookie());
> > >   	}
> > >
> > > +	const auto &crop = request->controls().get(controls::ScalerCrop);
> > > +	if (crop && useDewarper_) {
> > > +		Rectangle appliedRect = crop.value();
> > > +		int ret = dewarper_->setCrop(&data->mainPathStream_, &appliedRect);
> > > +		if (!ret) {
> > > +			if (appliedRect != crop.value()) {
> > > +				/*
> > > +				 * \todo How to handle these case?
> > > +				 * Do we aim for pixel perfect set rectangles?
> > > +				 */
> > I think this should be reported in metadata as it has been applied and
> > let the application decide what to do there
>
> ack
> >
> > > +				LOG(RkISP1, Warning)
> > > +					<< "Applied rectangle " << appliedRect.toString()
> > > +					<< " differs from requested " << crop.value().toString();
> > > +			}
> > > +
> > > +			info->scalerCrop = appliedRect;
> > > +		}
> > > +	}
> > > +
> > >   	data->frame_++;
> > >
> > >   	return 0;
> > > @@ -1110,6 +1191,12 @@ int PipelineHandlerRkISP1::updateControls(RkISP1CameraData *data)
> > >   {
> > >   	ControlInfoMap::Map rkisp1Controls;
> > >
> > > +	if (dewarper_) {
> > > +		auto [minCrop, maxCrop] = dewarper_->getCropBounds(&data->mainPathStream_);
> > > +
> > > +		rkisp1Controls[&controls::ScalerCrop] = ControlInfo(minCrop, maxCrop, maxCrop);
> > > +	}
> > > +
> > >   	/* Add the IPA registered controls to list of camera controls. */
> > >   	for (const auto &ipaControl : data->ipaControls_)
> > >   		rkisp1Controls[ipaControl.first] = ipaControl.second;
> > > @@ -1173,6 +1260,7 @@ int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
> > >
> > >   bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
> > >   {
> > > +	std::shared_ptr<MediaDevice> dwpMediaDevice;
> > >   	const MediaPad *pad;
> > >
> > >   	DeviceMatch dm("rkisp1");
> > > @@ -1237,6 +1325,26 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
> > >   	stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
> > >   	param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);
> > >
> > > +	/* If dewarper is present, create its instance. */
> > > +	DeviceMatch dwp("dw100");
> > > +	dwp.add("dw100-source");
> > > +	dwp.add("dw100-sink");
> > > +
> > > +	dwpMediaDevice = enumerator->search(dwp);
> > > +	if (dwpMediaDevice) {
> > > +		dewarper_ = std::make_unique<ConverterDW100>(std::move(dwpMediaDevice));
> > > +		if (dewarper_->isValid()) {
> > > +			dewarper_->outputBufferReady.connect(
> > > +				this, &PipelineHandlerRkISP1::dewarpBufferReady);
> > > +
> > > +			LOG(RkISP1, Info) << "Using DW100 dewarper " << dewarper_->deviceNode();
> > > +		} else {
> > > +			LOG(RkISP1, Debug) << "Found DW100 dewarper " << dewarper_->deviceNode()
> > > +					   << " but invalid";
> > > +			dewarper_.reset();
> > > +		}
> > > +	}
> > > +
> > >   	/*
> > >   	 * Enumerate all sensors connected to the ISP and create one
> > >   	 * camera instance for each of them.
> > > @@ -1283,7 +1391,7 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
> > >   		return;
> > >
> > >   	const FrameMetadata &metadata = buffer->metadata();
> > > -	Request *request = buffer->request();
> > > +	Request *request = info->request;
> > >
> > >   	if (metadata.status != FrameMetadata::FrameCancelled) {
> > >   		/*
> > > @@ -1300,11 +1408,43 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
> > >   				data->delayedCtrls_->get(metadata.sequence);
> > >   			data->ipa_->processStatsBuffer(info->frame, 0, ctrls);
> > >   		}
> > > +
> > rogue blank line
> >
> > >   	} else {
> > >   		if (isRaw_)
> > >   			info->metadataProcessed = true;
> > >   	}
> > >
> > > +	if (useDewarper_) {
> > > +		/*
> > > +		 * Queue input and output buffers to the dewarper. The output
> > > +		 * buffers for the dewarper are the buffers of the request, supplied
> > > +		 * by the application.
> > > +		 */
> > > +		int ret = dewarper_->queueBuffers(buffer, request->buffers());
> > What if buffer comes from the self path ?
>
> I am yet to become aware of a i.MX8MP platform where dewarper is present -
> along with main and self paths.
>

Ok, as said above, if the 8MP has no self path and the rkisp1 has no
dewarper (but has a self path), we're safe -for now-.

> >
> > > +		if (ret < 0)
> > > +			LOG(RkISP1, Error) << "Cannot queue buffers to dewarper: "
> > > +					   << strerror(-ret);
> > > +
> > > +		return;
> > > +	}
> > > +
> > > +	completeBuffer(request, buffer);
> > > +	tryCompleteRequest(info);
> > > +}
> > > +
> > > +void PipelineHandlerRkISP1::dewarpBufferReady(FrameBuffer *buffer)
> > > +{
> > > +	ASSERT(activeCamera_);
> > > +	RkISP1CameraData *data = cameraData(activeCamera_);
> > > +	Request *request = buffer->request();
> > > +
> > > +	RkISP1FrameInfo *info = data->frameInfo_.find(buffer->request());
> > > +	if (!info)
> > > +		return;
> > > +
> > > +	if (info->scalerCrop)
> > > +		request->metadata().set(controls::ScalerCrop, info->scalerCrop.value());
> > > +
> > >   	completeBuffer(request, buffer);
> > >   	tryCompleteRequest(info);
> > >   }
> > > diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
> > > index c49017d1..c96ec1d6 100644
> > > --- a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
> > > +++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
> > > @@ -56,7 +56,7 @@ const std::map<PixelFormat, uint32_t> formatToMediaBus = {
> > >
> > >   RkISP1Path::RkISP1Path(const char *name, const Span<const PixelFormat> &formats,
> > >   		       const Size &minResolution, const Size &maxResolution)
> > > -	: name_(name), running_(false), formats_(formats),
> > > +	: name_(name), running_(false), internalBufs_(false), formats_(formats),
> > >   	  minResolution_(minResolution), maxResolution_(maxResolution),
> > >   	  link_(nullptr)
> > >   {
> > > @@ -402,10 +402,12 @@ int RkISP1Path::start()
> > >   	if (running_)
> > >   		return -EBUSY;
> > >
> > > -	/* \todo Make buffer count user configurable. */
> > > -	ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
> > > -	if (ret)
> > > -		return ret;
> > > +	if (!internalBufs_) {
> > Would you still need this if you exportBuffers() from the main path ?
> >
> > > +		/* \todo Make buffer count user configurable. */
> > > +		ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
> > > +		if (ret)
> > > +			return ret;
> > > +	}
> > >
> > >   	ret = video_->streamOn();
> > >   	if (ret) {
> > > diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.h b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
> > > index 08edefec..b7fa82d0 100644
> > > --- a/src/libcamera/pipeline/rkisp1/rkisp1_path.h
> > > +++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
> > > @@ -55,6 +55,19 @@ public:
> > >   		return video_->exportBuffers(bufferCount, buffers);
> > >   	}
> > >
> > > +	int allocateBuffers(unsigned int bufferCount,
> > > +			    std::vector<std::unique_ptr<FrameBuffer>> *buffers)
> > > +	{
> > > +		internalBufs_ = true;
> > > +		return video_->allocateBuffers(bufferCount, buffers);
> > What if allocateBuffers() fails ? You have set the internalBufs_ flag
> > before this call.
> >
> > > +	}
> > > +
> > > +	int releaseBuffers()
> > > +	{
> > > +		ASSERT(internalBufs_);
> > > +		return video_->releaseBuffers();
> > > +	}
> > > +
> > I think you should move these to the cpp file.
> >
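
A minimal sketch of how both comments above could be addressed, with the helper
moved to rkisp1_path.cpp and the flag only set once the allocation has actually
succeeded (hypothetical, untested):

	int RkISP1Path::allocateBuffers(unsigned int bufferCount,
					std::vector<std::unique_ptr<FrameBuffer>> *buffers)
	{
		int ret = video_->allocateBuffers(bufferCount, buffers);
		if (ret < 0)
			return ret;

		/* Mark internal buffer usage only on success. */
		internalBufs_ = true;

		return ret;
	}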
> > >   	int start();
> > >   	void stop();
> > >
> > > @@ -68,6 +81,7 @@ private:
> > >
> > >   	const char *name_;
> > >   	bool running_;
> > > +	bool internalBufs_;
> > >
> > >   	const Span<const PixelFormat> formats_;
> > >   	std::set<PixelFormat> streamFormats_;
> > > --
> > > 2.45.2
> > >
>
Paul Elder July 19, 2024, 7:11 a.m. UTC | #4
On Thu, Jul 18, 2024 at 11:54:05AM +0200, Jacopo Mondi wrote:
> Hi Umang
> 
> On Thu, Jul 18, 2024 at 10:52:35AM GMT, Umang Jain wrote:
> > Hi Jacopo
> >
> > On 17/07/24 6:40 pm, Jacopo Mondi wrote:
> > > Hi Umang
> > >
> > > On Wed, Jul 10, 2024 at 01:51:51AM GMT, Umang Jain wrote:
> > > > Plumb the ConverterDW100 converter in the rkisp1 pipeline handler.
> > > > If the dewarper is found, it is instantiated and buffers are exported
> > > > from it, instead of RkISP1Path. Internal buffers are allocated for the
> > > > RkISP1Path in case where dewarper is going to be used.
> > > >
> > > > The RKISP1 pipeline handler now supports scaler crop control through
> > > > ConverterDW100. Register the ScalerCrop control for the cameras created
> > > > in the RKISP1 pipeline handler.
> > > >
> > > > Signed-off-by: Umang Jain <umang.jain@ideasonboard.com>
> > > > ---
> > > >   src/libcamera/pipeline/rkisp1/rkisp1.cpp      | 148 +++++++++++++++++-
> > > >   src/libcamera/pipeline/rkisp1/rkisp1_path.cpp |  12 +-
> > > >   src/libcamera/pipeline/rkisp1/rkisp1_path.h   |  14 ++
> > > >   3 files changed, 165 insertions(+), 9 deletions(-)
> > > >
> > > > diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > > > index bfc44239..881e10e1 100644
> > > > --- a/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > > > +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> > > > @@ -8,9 +8,11 @@
> > > >   #include <algorithm>
> > > >   #include <array>
> > > >   #include <iomanip>
> > > > +#include <map>
> > > >   #include <memory>
> > > >   #include <numeric>
> > > >   #include <queue>
> > > > +#include <vector>
> > > >
> > > >   #include <linux/media-bus-format.h>
> > > >   #include <linux/rkisp1-config.h>
> > > > @@ -33,6 +35,7 @@
> > > >
> > > >   #include "libcamera/internal/camera.h"
> > > >   #include "libcamera/internal/camera_sensor.h"
> > > > +#include "libcamera/internal/converter/converter_dw100.h"
> > > >   #include "libcamera/internal/delayed_controls.h"
> > > >   #include "libcamera/internal/device_enumerator.h"
> > > >   #include "libcamera/internal/framebuffer.h"
> > > > @@ -62,6 +65,8 @@ struct RkISP1FrameInfo {
> > > >
> > > >   	bool paramDequeued;
> > > >   	bool metadataProcessed;
> > > > +
> > > > +	std::optional<Rectangle> scalerCrop;
> > > >   };
> > > >
> > > >   class RkISP1Frames
> > > > @@ -181,6 +186,7 @@ private:
> > > >   	void bufferReady(FrameBuffer *buffer);
> > > >   	void paramReady(FrameBuffer *buffer);
> > > >   	void statReady(FrameBuffer *buffer);
> > > > +	void dewarpBufferReady(FrameBuffer *buffer);
> > > >   	void frameStart(uint32_t sequence);
> > > >
> > > >   	int allocateBuffers(Camera *camera);
> > > > @@ -200,6 +206,13 @@ private:
> > > >   	RkISP1MainPath mainPath_;
> > > >   	RkISP1SelfPath selfPath_;
> > > >
> > > > +	std::unique_ptr<ConverterDW100> dewarper_;
> > > > +	bool useDewarper_;
> > > > +
> > > > +	/* Internal buffers used when dewarper is being used */
> > > > +	std::vector<std::unique_ptr<FrameBuffer>> mainPathBuffers_;
> > > > +	std::queue<FrameBuffer *> availableMainPathBuffers_;
> > > > +
> > > >   	std::vector<std::unique_ptr<FrameBuffer>> paramBuffers_;
> > > >   	std::vector<std::unique_ptr<FrameBuffer>> statBuffers_;
> > > >   	std::queue<FrameBuffer *> availableParamBuffers_;
> > > > @@ -222,6 +235,8 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
> > > >
> > > >   	FrameBuffer *paramBuffer = nullptr;
> > > >   	FrameBuffer *statBuffer = nullptr;
> > > > +	FrameBuffer *mainPathBuffer = nullptr;
> > > > +	FrameBuffer *selfPathBuffer = nullptr;
> > > >
> > > >   	if (!isRaw) {
> > > >   		if (pipe_->availableParamBuffers_.empty()) {
> > > > @@ -239,10 +254,16 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
> > > >
> > > >   		statBuffer = pipe_->availableStatBuffers_.front();
> > > >   		pipe_->availableStatBuffers_.pop();
> > > > +
> > > > +		if (pipe_->useDewarper_) {
> > > > +			mainPathBuffer = pipe_->availableMainPathBuffers_.front();
> > > Can we exhaust the availableMainPathBuffers_ queue? Should this be
> > > checked for validity ?
> >
> > Seems like a rule that should be applied to all available*Buffers_ in the
> > rkisp1 pipeline handler?
> >
> 
> Should they all be fixed then ? :)
> 
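
As an aside, a minimal sketch of such a guard in RkISP1Frames::create(),
mirroring the existing availableParamBuffers_ / availableStatBuffers_ handling
(hypothetical, not part of the patch; the log message is made up):

	if (pipe_->useDewarper_) {
		/* Hypothetical underrun check before taking a main path buffer. */
		if (pipe_->availableMainPathBuffers_.empty()) {
			LOG(RkISP1, Error) << "Main path buffer underrun";
			return nullptr;
		}

		mainPathBuffer = pipe_->availableMainPathBuffers_.front();
		pipe_->availableMainPathBuffers_.pop();
	}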
> > >
> > > > +			pipe_->availableMainPathBuffers_.pop();
> > > > +		}
> > > >   	}
> > > >
> > > > -	FrameBuffer *mainPathBuffer = request->findBuffer(&data->mainPathStream_);
> > > > -	FrameBuffer *selfPathBuffer = request->findBuffer(&data->selfPathStream_);
> > > > +	if (!mainPathBuffer)
> > > > +		mainPathBuffer = request->findBuffer(&data->mainPathStream_);
> > > > +	selfPathBuffer = request->findBuffer(&data->selfPathStream_);
> > > >
> > > >   	RkISP1FrameInfo *info = new RkISP1FrameInfo;
> > > >
> > > > @@ -268,6 +289,7 @@ int RkISP1Frames::destroy(unsigned int frame)
> > > >
> > > >   	pipe_->availableParamBuffers_.push(info->paramBuffer);
> > > >   	pipe_->availableStatBuffers_.push(info->statBuffer);
> > > > +	pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
> > > >
> > > >   	frameInfo_.erase(info->frame);
> > > >
> > > > @@ -283,6 +305,7 @@ void RkISP1Frames::clear()
> > > >
> > > >   		pipe_->availableParamBuffers_.push(info->paramBuffer);
> > > >   		pipe_->availableStatBuffers_.push(info->statBuffer);
> > > > +		pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
> > > >
> > > >   		delete info;
> > > >   	}
> > > > @@ -607,7 +630,7 @@ CameraConfiguration::Status RkISP1CameraConfiguration::validate()
> > > >    */
> > > >
> > > >   PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
> > > > -	: PipelineHandler(manager), hasSelfPath_(true)
> > > > +	: PipelineHandler(manager), hasSelfPath_(true), useDewarper_(false)
> > > >   {
> > > >   }
> > > >
> > > > @@ -785,12 +808,19 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
> > > >   		<< " crop " << rect;
> > > >
> > > >   	std::map<unsigned int, IPAStream> streamConfig;
> > > > +	std::vector<std::reference_wrapper<StreamConfiguration>> outputCfgs;
> > > >
> > > >   	for (const StreamConfiguration &cfg : *config) {
> > > >   		if (cfg.stream() == &data->mainPathStream_) {
> > > >   			ret = mainPath_.configure(cfg, format);
> > > >   			streamConfig[0] = IPAStream(cfg.pixelFormat,
> > > >   						    cfg.size);
> > > > +			/* Configure dewarp */
> > > > +			if (dewarper_ && !isRaw_) {
> > > > +				outputCfgs.push_back(const_cast<StreamConfiguration &>(cfg));
> > > Why a vector, can you have more than one ?
> >
> > a converter can have more than one output, yes
> 
> Maybe in the general case. Here it doesn't seem you can have more than one
> config that gets applied to the dewarper
> 
> > >
> > > > +				ret = dewarper_->configure(cfg, outputCfgs);
> > > Ah, the need to have a vector comes from the configure() signature
> > >
> > > Seems like
> > > 				ret = dewarper_->configure(cfg, { cfg });
> > >
> > > would do
> >
> > const is a problem here I think
> >
> 
> compiles here, not sent through the CI loop.
> 
> Have you had any compiler error with the above ?
> 
> > >
> > > > +				useDewarper_ = ret ? false : true;
> > > > +			}
> > > >   		} else if (hasSelfPath_) {
> > > >   			ret = selfPath_.configure(cfg, format);
> > > >   			streamConfig[1] = IPAStream(cfg.pixelFormat,
> > > > @@ -839,6 +869,9 @@ int PipelineHandlerRkISP1::exportFrameBuffers([[maybe_unused]] Camera *camera, S
> > > >   	RkISP1CameraData *data = cameraData(camera);
> > > >   	unsigned int count = stream->configuration().bufferCount;
> > > >
> > > > +	if (useDewarper_)
> > > > +		return dewarper_->exportBuffers(&data->mainPathStream_, count, buffers);
> > > > +
> > > Unconditionally ? Even if the stream is the selfPath one ?
> >
> > dewarper only works with mainPath_ I believe.
> 
> Is it a pipeline handler design choice or an HW limitation ?

All devices that have a dewarper only have a main path (so far, at least).

> 
> >
> > The i.MX8MP doesn't have a self path, as far as I know. Paul, can you correct me
> > on this? I think we had a discussion regarding that.

Yeah that's right.

> >
> 
> 8mp doesn't have a self path
> rkisp doesn't have a dewarper
> 
> so this seems to be safe -for now-

Yeah.


Paul

> 
> > >
> > > >   	if (stream == &data->mainPathStream_)
> > > >   		return mainPath_.exportBuffers(count, buffers);
> > > >   	else if (hasSelfPath_ && stream == &data->selfPathStream_)
> > > > @@ -866,6 +899,16 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
> > > >   		ret = stat_->allocateBuffers(maxCount, &statBuffers_);
> > > >   		if (ret < 0)
> > > >   			goto error;
> > > > +
> > > > +		/* If the dewarper is being used, allocate internal buffers for ISP */
> > > > +		if (useDewarper_) {
> > > > +			ret = mainPath_.allocateBuffers(maxCount, &mainPathBuffers_);
> > > > +			if (ret < 0)
> > > > +				goto error;
> > > Is this right ?
> > >
> > > It seems the model here is to allocate buffers in the main path and
> > > import them in the dewarper.
> >
> > The model here is to simply export the buffers from the dewarper and allocate
> > internal buffers for the RkISP1Path
> 
> Yes and I wonder if it is correct.
> 
> I'm not talking about buffers exported from the dewarper to the
> application (the one created by exportFrameBuffers() which only serves
> for the FrameBufferAllocator to provide FrameBuffers from the
> application).
> 
> I'm talking about the buffers between the main path and the dewarper.
> 
> >
> > These allocated internal buffers (availableMainPathBuffers_) are then
> > queued to the dewarper.
> >
> > >
> > > allocateBuffers() creates and exports the buffers but sets the video
> > > device in MMAP mode.
> > >
> > > I would instead expect the main path to
> > > 1) export buffers
> > > 2) import them back to initialize the queue in DMABUF mode
> > >
> > >  From there, buffers dequeued from the main path are queued to the
> > > dewarper without any need to be mapped into a userspace accessible
> > > address.
> > >
> > > Am I confused maybe ?
> >
> > Now I'm confused with the above strategy. Do you mean to export buffers
> > from mainPath and have internal buffers allocated in the dewarper?
> 
> I mean allocating memory in the main path and passing the buffers to the
> dewarper as dmabufs, which is what allocateBuffers() does, but it
> initializes the queue in MMAP mode, where you would only need DMABUF.
> 
> >
> > When you say,
> >
> > 2) import them back to intialize the queue in DMABUF mode
> >
> > Import them back where?
> 
> In the mainpath itself. See how the CIO2 does that.
> 
> int CIO2Device::start()
> {
> 	int ret = output_->exportBuffers(kBufferCount, &buffers_);
> 	if (ret < 0)
> 		return ret;
> 
> 	ret = output_->importBuffers(kBufferCount);
> 	if (ret)
> 		LOG(IPU3, Error) << "Failed to import CIO2 buffers";
> 
> 
> The main idea is that both allocateBuffers() and exportBuffers()
> actually allocate memory on the device on which those functions are
> called. The difference is the memory type used by the videobuf2 queue.
> importBuffers() instead does not allocate memory on the device but
> initializes its memory type to DMABUF and prepares it to receive
> queueBuffer() calls.
> 
> With allocateBuffers() the memory type is set to MMAP, with
> exportBuffers() is not initialized, so you have to later call
> either allocateBuffers() or importBuffers() to initialize it.
> 
> As V4L2M2MConverter imports buffers at start() time:
> 
> int V4L2M2MConverter::V4L2M2MStream::start()
> {
> 	int ret = m2m_->output()->importBuffers(inputBufferCount_);
> 	if (ret < 0)
> 		return ret;
> 
> 	ret = m2m_->capture()->importBuffers(outputBufferCount_);
> 	if (ret < 0) {
> 		stop();
> 		return ret;
> 	}
> 
> I wonder if it is necessary to set the main path in MMAP mode, or whether it
> can operate in DMABUF mode only by combining exportBuffers() +
> importBuffers(). To be honest, the implications in videobuf2 of using
> one memory type or the other are not 100% clear to me yet.
> 
> Reading the V4L2VideoDevice documentation, however, allocateBuffers()
> seems to be the preferred way to implement internal buffer pools
> between video devices, so maybe I'm totally off-track here
> 
>  *
>  * The V4L2VideoDevice class supports the V4L2 MMAP and DMABUF memory types:
>  *
>  * - The allocateBuffers() function wraps buffer allocation with the V4L2 MMAP
>  *   memory type. It requests buffers from the driver, allocating the
>  *   corresponding memory, and exports them as a set of FrameBuffer objects.
>  *   Upon successful return the driver's internal buffer management is
>  *   initialized in MMAP mode, and the video device is ready to accept
>  *   queueBuffer() calls.
>  *
>  *   This is the most traditional V4L2 buffer management, and is mostly useful
>  *   to support internal buffer pools in pipeline handlers, either for CPU
>  *   consumption (such as statistics or parameters pools), or for internal
>  *   image buffers shared between devices.
>  *
>  * - The exportBuffers() function operates similarly to allocateBuffers(), but
>  *   leaves the driver's internal buffer management uninitialized. It uses the
>  *   V4L2 buffer orphaning support to allocate buffers with the MMAP method,
>  *   export them as a set of FrameBuffer objects, and reset the driver's
>  *   internal buffer management. The video device shall be initialized with
>  *   importBuffers() or allocateBuffers() before it can accept queueBuffer()
>  *   calls. The exported buffers are directly usable with any V4L2 video device
>  *   in DMABUF mode, or with other dmabuf importers.
>  *
>  *   This method is mostly useful to implement buffer allocation helpers or to
>  *   allocate ancillary buffers, when a V4L2 video device is used in DMABUF
>  *   mode but no other source of buffers is available. An example use case
>  *   would be allocation of scratch buffers to be used in case of buffer
>  *   underruns on a video device that is otherwise supplied with external
>  *   buffers.
>  *
>  * - The importBuffers() function initializes the driver's buffer management to
>  *   import buffers in DMABUF mode. It requests buffers from the driver, but
>  *   doesn't allocate memory. Upon successful return, the video device is ready
>  *   to accept queueBuffer() calls. The buffers to be imported are provided to
>  *   queueBuffer(), and may be supplied externally, or come from a previous
>  *   exportBuffers() call.
>  *
>  *   This is the usual buffers initialization method for video devices whose
>  *   buffers are exposed outside of libcamera. It is also typically used on one
>  *   of the two video device that participate in buffer sharing inside
>  *   pipelines, the other video device typically using allocateBuffers().
>  *
> 
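
For reference, a minimal sketch of what that exportBuffers() + importBuffers()
scheme could look like in PipelineHandlerRkISP1::allocateBuffers() (hypothetical,
untested): the intermediate buffers are allocated as orphaned MMAP buffers and
the main path video node stays in DMABUF mode, so RkISP1Path::start() could keep
calling importBuffers() unconditionally and the internalBufs_ flag would no
longer be needed:

	if (useDewarper_) {
		/*
		 * Hypothetical: allocate intermediate buffers without switching
		 * the main path queue to MMAP mode.
		 */
		ret = mainPath_.exportBuffers(maxCount, &mainPathBuffers_);
		if (ret < 0)
			goto error;

		for (std::unique_ptr<FrameBuffer> &buffer : mainPathBuffers_)
			availableMainPathBuffers_.push(buffer.get());
	}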
> >
> > But certainly it's a worthwhile idea to have them exported from the main
> > path and also queued to the dewarper. It would require some experimentation
> > on my side.
> >
> > >
> > >
> > > > +
> > > > +			for (std::unique_ptr<FrameBuffer> &buffer : mainPathBuffers_)
> > > > +				availableMainPathBuffers_.push(buffer.get());
> > > > +		}
> > > >   	}
> > > >
> > > >   	for (std::unique_ptr<FrameBuffer> &buffer : paramBuffers_) {
> > > > @@ -889,6 +932,7 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
> > > >   error:
> > > >   	paramBuffers_.clear();
> > > >   	statBuffers_.clear();
> > > > +	mainPathBuffers_.clear();
> > > >
> > > >   	return ret;
> > > >   }
> > > > @@ -903,8 +947,12 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
> > > >   	while (!availableParamBuffers_.empty())
> > > >   		availableParamBuffers_.pop();
> > > >
> > > > +	while (!availableMainPathBuffers_.empty())
> > > > +		availableMainPathBuffers_.pop();
> > > > +
> > > >   	paramBuffers_.clear();
> > > >   	statBuffers_.clear();
> > > > +	mainPathBuffers_.clear();
> > > >
> > > >   	std::vector<unsigned int> ids;
> > > >   	for (IPABuffer &ipabuf : data->ipaBuffers_)
> > > > @@ -919,6 +967,9 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
> > > >   	if (stat_->releaseBuffers())
> > > >   		LOG(RkISP1, Error) << "Failed to release stat buffers";
> > > >
> > > > +	if (useDewarper_)
> > > > +		mainPath_.releaseBuffers();
> > > > +
> > > >   	return 0;
> > > >   }
> > > >
> > > > @@ -961,6 +1012,14 @@ int PipelineHandlerRkISP1::start(Camera *camera, [[maybe_unused]] const ControlL
> > > >   				<< "Failed to start statistics " << camera->id();
> > > >   			return ret;
> > > >   		}
> > > > +
> > > > +		if (useDewarper_) {
> > > > +			ret = dewarper_->start();
> > > > +			if (ret) {
> > > > +				LOG(RkISP1, Error) << "Failed to start dewarper";
> > > The error handling path should undo all the previous actions
> > >
> > > > +				return ret;
> > > > +			}
> > > > +		}
> > > >   	}
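
A minimal sketch of what the unwinding suggested above could look like,
mirroring the earlier error paths in start() (hypothetical, untested; the exact
teardown calls are an assumption):

	if (useDewarper_) {
		ret = dewarper_->start();
		if (ret) {
			LOG(RkISP1, Error) << "Failed to start dewarper";
			/* Hypothetical unwind, mirroring the earlier error paths. */
			stat_->streamOff();
			param_->streamOff();
			data->ipa_->stop();
			freeBuffers(camera);
			return ret;
		}
	}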
> > > >
> > > >   	if (data->mainPath_->isEnabled()) {
> > > > @@ -1015,6 +1074,9 @@ void PipelineHandlerRkISP1::stopDevice(Camera *camera)
> > > >   		if (ret)
> > > >   			LOG(RkISP1, Warning)
> > > >   				<< "Failed to stop parameters for " << camera->id();
> > > > +
> > > > +		if (useDewarper_)
> > > > +			dewarper_->stop();
> > > >   	}
> > > >
> > > >   	ASSERT(data->queuedRequests_.empty());
> > > > @@ -1045,6 +1107,25 @@ int PipelineHandlerRkISP1::queueRequestDevice(Camera *camera, Request *request)
> > > >   					     info->paramBuffer->cookie());
> > > >   	}
> > > >
> > > > +	const auto &crop = request->controls().get(controls::ScalerCrop);
> > > > +	if (crop && useDewarper_) {
> > > > +		Rectangle appliedRect = crop.value();
> > > > +		int ret = dewarper_->setCrop(&data->mainPathStream_, &appliedRect);
> > > > +		if (!ret) {
> > > > +			if (appliedRect != crop.value()) {
> > > > +				/*
> > > > +				 * \todo How to handle these case?
> > > > +				 * Do we aim for pixel perfect set rectangles?
> > > > +				 */
> > > I think this should be reported in metadata as it has been applied and
> > > let the application decide what to do there
> >
> > ack
> > >
> > > > +				LOG(RkISP1, Warning)
> > > > +					<< "Applied rectangle " << appliedRect.toString()
> > > > +					<< " differs from requested " << crop.value().toString();
> > > > +			}
> > > > +
> > > > +			info->scalerCrop = appliedRect;
> > > > +		}
> > > > +	}
> > > > +
> > > >   	data->frame_++;
> > > >
> > > >   	return 0;
> > > > @@ -1110,6 +1191,12 @@ int PipelineHandlerRkISP1::updateControls(RkISP1CameraData *data)
> > > >   {
> > > >   	ControlInfoMap::Map rkisp1Controls;
> > > >
> > > > +	if (dewarper_) {
> > > > +		auto [minCrop, maxCrop] = dewarper_->getCropBounds(&data->mainPathStream_);
> > > > +
> > > > +		rkisp1Controls[&controls::ScalerCrop] = ControlInfo(minCrop, maxCrop, maxCrop);
> > > > +	}
> > > > +
> > > >   	/* Add the IPA registered controls to list of camera controls. */
> > > >   	for (const auto &ipaControl : data->ipaControls_)
> > > >   		rkisp1Controls[ipaControl.first] = ipaControl.second;
> > > > @@ -1173,6 +1260,7 @@ int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
> > > >
> > > >   bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
> > > >   {
> > > > +	std::shared_ptr<MediaDevice> dwpMediaDevice;
> > > >   	const MediaPad *pad;
> > > >
> > > >   	DeviceMatch dm("rkisp1");
> > > > @@ -1237,6 +1325,26 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
> > > >   	stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
> > > >   	param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);
> > > >
> > > > +	/* If dewarper is present, create its instance. */
> > > > +	DeviceMatch dwp("dw100");
> > > > +	dwp.add("dw100-source");
> > > > +	dwp.add("dw100-sink");
> > > > +
> > > > +	dwpMediaDevice = enumerator->search(dwp);
> > > > +	if (dwpMediaDevice) {
> > > > +		dewarper_ = std::make_unique<ConverterDW100>(std::move(dwpMediaDevice));
> > > > +		if (dewarper_->isValid()) {
> > > > +			dewarper_->outputBufferReady.connect(
> > > > +				this, &PipelineHandlerRkISP1::dewarpBufferReady);
> > > > +
> > > > +			LOG(RkISP1, Info) << "Using DW100 dewarper " << dewarper_->deviceNode();
> > > > +		} else {
> > > > +			LOG(RkISP1, Debug) << "Found DW100 dewarper " << dewarper_->deviceNode()
> > > > +					   << " but invalid";
> > > > +			dewarper_.reset();
> > > > +		}
> > > > +	}
> > > > +
> > > >   	/*
> > > >   	 * Enumerate all sensors connected to the ISP and create one
> > > >   	 * camera instance for each of them.
> > > > @@ -1283,7 +1391,7 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
> > > >   		return;
> > > >
> > > >   	const FrameMetadata &metadata = buffer->metadata();
> > > > -	Request *request = buffer->request();
> > > > +	Request *request = info->request;
> > > >
> > > >   	if (metadata.status != FrameMetadata::FrameCancelled) {
> > > >   		/*
> > > > @@ -1300,11 +1408,43 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
> > > >   				data->delayedCtrls_->get(metadata.sequence);
> > > >   			data->ipa_->processStatsBuffer(info->frame, 0, ctrls);
> > > >   		}
> > > > +
> > > rogue blank line
> > >
> > > >   	} else {
> > > >   		if (isRaw_)
> > > >   			info->metadataProcessed = true;
> > > >   	}
> > > >
> > > > +	if (useDewarper_) {
> > > > +		/*
> > > > +		 * Queue input and output buffers to the dewarper. The output
> > > > +		 * buffers for the dewarper are the buffers of the request, supplied
> > > > +		 * by the application.
> > > > +		 */
> > > > +		int ret = dewarper_->queueBuffers(buffer, request->buffers());
> > > What if buffer comes from the self path ?
> >
> > I am yet to become aware of an i.MX8MP platform where a dewarper is present
> > along with both main and self paths.
> >
> 
> Ok, as said above: if the 8MP has no self path and rkisp1 has no dewarper
> (but has a self path), we're safe -for now-
> 
> > >
> > > > +		if (ret < 0)
> > > > +			LOG(RkISP1, Error) << "Cannot queue buffers to dewarper: "
> > > > +					   << strerror(-ret);
> > > > +
> > > > +		return;
> > > > +	}
> > > > +
> > > > +	completeBuffer(request, buffer);
> > > > +	tryCompleteRequest(info);
> > > > +}
> > > > +
> > > > +void PipelineHandlerRkISP1::dewarpBufferReady(FrameBuffer *buffer)
> > > > +{
> > > > +	ASSERT(activeCamera_);
> > > > +	RkISP1CameraData *data = cameraData(activeCamera_);
> > > > +	Request *request = buffer->request();
> > > > +
> > > > +	RkISP1FrameInfo *info = data->frameInfo_.find(buffer->request());
> > > > +	if (!info)
> > > > +		return;
> > > > +
> > > > +	if (info->scalerCrop)
> > > > +		request->metadata().set(controls::ScalerCrop, info->scalerCrop.value());
> > > > +
> > > >   	completeBuffer(request, buffer);
> > > >   	tryCompleteRequest(info);
> > > >   }
> > > > diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
> > > > index c49017d1..c96ec1d6 100644
> > > > --- a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
> > > > +++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
> > > > @@ -56,7 +56,7 @@ const std::map<PixelFormat, uint32_t> formatToMediaBus = {
> > > >
> > > >   RkISP1Path::RkISP1Path(const char *name, const Span<const PixelFormat> &formats,
> > > >   		       const Size &minResolution, const Size &maxResolution)
> > > > -	: name_(name), running_(false), formats_(formats),
> > > > +	: name_(name), running_(false), internalBufs_(false), formats_(formats),
> > > >   	  minResolution_(minResolution), maxResolution_(maxResolution),
> > > >   	  link_(nullptr)
> > > >   {
> > > > @@ -402,10 +402,12 @@ int RkISP1Path::start()
> > > >   	if (running_)
> > > >   		return -EBUSY;
> > > >
> > > > -	/* \todo Make buffer count user configurable. */
> > > > -	ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
> > > > -	if (ret)
> > > > -		return ret;
> > > > +	if (!internalBufs_) {
> > > Would you still need this if you exportBuffers() from the main path ?
> > >
> > > > +		/* \todo Make buffer count user configurable. */
> > > > +		ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
> > > > +		if (ret)
> > > > +			return ret;
> > > > +	}
> > > >
> > > >   	ret = video_->streamOn();
> > > >   	if (ret) {
> > > > diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.h b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
> > > > index 08edefec..b7fa82d0 100644
> > > > --- a/src/libcamera/pipeline/rkisp1/rkisp1_path.h
> > > > +++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
> > > > @@ -55,6 +55,19 @@ public:
> > > >   		return video_->exportBuffers(bufferCount, buffers);
> > > >   	}
> > > >
> > > > +	int allocateBuffers(unsigned int bufferCount,
> > > > +			    std::vector<std::unique_ptr<FrameBuffer>> *buffers)
> > > > +	{
> > > > +		internalBufs_ = true;
> > > > +		return video_->allocateBuffers(bufferCount, buffers);
> > > What if allocateBuffers() fails ? You have set the internalBufs_ flag
> > > before this call.
> > >
> > > > +	}
> > > > +
> > > > +	int releaseBuffers()
> > > > +	{
> > > > +		ASSERT(internalBufs_);
> > > > +		return video_->releaseBuffers();
> > > > +	}
> > > > +
> > > I think you should move these to the cpp file.
> > >
> > > >   	int start();
> > > >   	void stop();
> > > >
> > > > @@ -68,6 +81,7 @@ private:
> > > >
> > > >   	const char *name_;
> > > >   	bool running_;
> > > > +	bool internalBufs_;
> > > >
> > > >   	const Span<const PixelFormat> formats_;
> > > >   	std::set<PixelFormat> streamFormats_;
> > > > --
> > > > 2.45.2
> > > >
> >

Patch
diff mbox series

diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
index bfc44239..881e10e1 100644
--- a/src/libcamera/pipeline/rkisp1/rkisp1.cpp
+++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
@@ -8,9 +8,11 @@ 
 #include <algorithm>
 #include <array>
 #include <iomanip>
+#include <map>
 #include <memory>
 #include <numeric>
 #include <queue>
+#include <vector>
 
 #include <linux/media-bus-format.h>
 #include <linux/rkisp1-config.h>
@@ -33,6 +35,7 @@ 
 
 #include "libcamera/internal/camera.h"
 #include "libcamera/internal/camera_sensor.h"
+#include "libcamera/internal/converter/converter_dw100.h"
 #include "libcamera/internal/delayed_controls.h"
 #include "libcamera/internal/device_enumerator.h"
 #include "libcamera/internal/framebuffer.h"
@@ -62,6 +65,8 @@  struct RkISP1FrameInfo {
 
 	bool paramDequeued;
 	bool metadataProcessed;
+
+	std::optional<Rectangle> scalerCrop;
 };
 
 class RkISP1Frames
@@ -181,6 +186,7 @@  private:
 	void bufferReady(FrameBuffer *buffer);
 	void paramReady(FrameBuffer *buffer);
 	void statReady(FrameBuffer *buffer);
+	void dewarpBufferReady(FrameBuffer *buffer);
 	void frameStart(uint32_t sequence);
 
 	int allocateBuffers(Camera *camera);
@@ -200,6 +206,13 @@  private:
 	RkISP1MainPath mainPath_;
 	RkISP1SelfPath selfPath_;
 
+	std::unique_ptr<ConverterDW100> dewarper_;
+	bool useDewarper_;
+
+	/* Internal buffers used when dewarper is being used */
+	std::vector<std::unique_ptr<FrameBuffer>> mainPathBuffers_;
+	std::queue<FrameBuffer *> availableMainPathBuffers_;
+
 	std::vector<std::unique_ptr<FrameBuffer>> paramBuffers_;
 	std::vector<std::unique_ptr<FrameBuffer>> statBuffers_;
 	std::queue<FrameBuffer *> availableParamBuffers_;
@@ -222,6 +235,8 @@  RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
 
 	FrameBuffer *paramBuffer = nullptr;
 	FrameBuffer *statBuffer = nullptr;
+	FrameBuffer *mainPathBuffer = nullptr;
+	FrameBuffer *selfPathBuffer = nullptr;
 
 	if (!isRaw) {
 		if (pipe_->availableParamBuffers_.empty()) {
@@ -239,10 +254,16 @@  RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
 
 		statBuffer = pipe_->availableStatBuffers_.front();
 		pipe_->availableStatBuffers_.pop();
+
+		if (pipe_->useDewarper_) {
+			mainPathBuffer = pipe_->availableMainPathBuffers_.front();
+			pipe_->availableMainPathBuffers_.pop();
+		}
 	}
 
-	FrameBuffer *mainPathBuffer = request->findBuffer(&data->mainPathStream_);
-	FrameBuffer *selfPathBuffer = request->findBuffer(&data->selfPathStream_);
+	if (!mainPathBuffer)
+		mainPathBuffer = request->findBuffer(&data->mainPathStream_);
+	selfPathBuffer = request->findBuffer(&data->selfPathStream_);
 
 	RkISP1FrameInfo *info = new RkISP1FrameInfo;
 
@@ -268,6 +289,7 @@  int RkISP1Frames::destroy(unsigned int frame)
 
 	pipe_->availableParamBuffers_.push(info->paramBuffer);
 	pipe_->availableStatBuffers_.push(info->statBuffer);
+	pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
 
 	frameInfo_.erase(info->frame);
 
@@ -283,6 +305,7 @@  void RkISP1Frames::clear()
 
 		pipe_->availableParamBuffers_.push(info->paramBuffer);
 		pipe_->availableStatBuffers_.push(info->statBuffer);
+		pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
 
 		delete info;
 	}
@@ -607,7 +630,7 @@  CameraConfiguration::Status RkISP1CameraConfiguration::validate()
  */
 
 PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
-	: PipelineHandler(manager), hasSelfPath_(true)
+	: PipelineHandler(manager), hasSelfPath_(true), useDewarper_(false)
 {
 }
 
@@ -785,12 +808,19 @@  int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
 		<< " crop " << rect;
 
 	std::map<unsigned int, IPAStream> streamConfig;
+	std::vector<std::reference_wrapper<StreamConfiguration>> outputCfgs;
 
 	for (const StreamConfiguration &cfg : *config) {
 		if (cfg.stream() == &data->mainPathStream_) {
 			ret = mainPath_.configure(cfg, format);
 			streamConfig[0] = IPAStream(cfg.pixelFormat,
 						    cfg.size);
+			/* Configure dewarp */
+			if (dewarper_ && !isRaw_) {
+				outputCfgs.push_back(const_cast<StreamConfiguration &>(cfg));
+				ret = dewarper_->configure(cfg, outputCfgs);
+				useDewarper_ = ret ? false : true;
+			}
 		} else if (hasSelfPath_) {
 			ret = selfPath_.configure(cfg, format);
 			streamConfig[1] = IPAStream(cfg.pixelFormat,
@@ -839,6 +869,9 @@  int PipelineHandlerRkISP1::exportFrameBuffers([[maybe_unused]] Camera *camera, S
 	RkISP1CameraData *data = cameraData(camera);
 	unsigned int count = stream->configuration().bufferCount;
 
+	if (useDewarper_)
+		return dewarper_->exportBuffers(&data->mainPathStream_, count, buffers);
+
 	if (stream == &data->mainPathStream_)
 		return mainPath_.exportBuffers(count, buffers);
 	else if (hasSelfPath_ && stream == &data->selfPathStream_)
@@ -866,6 +899,16 @@  int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
 		ret = stat_->allocateBuffers(maxCount, &statBuffers_);
 		if (ret < 0)
 			goto error;
+
+		/* If the dewarper is being used, allocate internal buffers for ISP */
+		if (useDewarper_) {
+			ret = mainPath_.allocateBuffers(maxCount, &mainPathBuffers_);
+			if (ret < 0)
+				goto error;
+
+			for (std::unique_ptr<FrameBuffer> &buffer : mainPathBuffers_)
+				availableMainPathBuffers_.push(buffer.get());
+		}
 	}
 
 	for (std::unique_ptr<FrameBuffer> &buffer : paramBuffers_) {
@@ -889,6 +932,7 @@  int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
 error:
 	paramBuffers_.clear();
 	statBuffers_.clear();
+	mainPathBuffers_.clear();
 
 	return ret;
 }
@@ -903,8 +947,12 @@  int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
 	while (!availableParamBuffers_.empty())
 		availableParamBuffers_.pop();
 
+	while (!availableMainPathBuffers_.empty())
+		availableMainPathBuffers_.pop();
+
 	paramBuffers_.clear();
 	statBuffers_.clear();
+	mainPathBuffers_.clear();
 
 	std::vector<unsigned int> ids;
 	for (IPABuffer &ipabuf : data->ipaBuffers_)
@@ -919,6 +967,9 @@  int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
 	if (stat_->releaseBuffers())
 		LOG(RkISP1, Error) << "Failed to release stat buffers";
 
+	if (useDewarper_)
+		mainPath_.releaseBuffers();
+
 	return 0;
 }
 
@@ -961,6 +1012,14 @@  int PipelineHandlerRkISP1::start(Camera *camera, [[maybe_unused]] const ControlL
 				<< "Failed to start statistics " << camera->id();
 			return ret;
 		}
+
+		if (useDewarper_) {
+			ret = dewarper_->start();
+			if (ret) {
+				LOG(RkISP1, Error) << "Failed to start dewarper";
+				return ret;
+			}
+		}
 	}
 
 	if (data->mainPath_->isEnabled()) {
@@ -1015,6 +1074,9 @@  void PipelineHandlerRkISP1::stopDevice(Camera *camera)
 		if (ret)
 			LOG(RkISP1, Warning)
 				<< "Failed to stop parameters for " << camera->id();
+
+		if (useDewarper_)
+			dewarper_->stop();
 	}
 
 	ASSERT(data->queuedRequests_.empty());
@@ -1045,6 +1107,25 @@  int PipelineHandlerRkISP1::queueRequestDevice(Camera *camera, Request *request)
 					     info->paramBuffer->cookie());
 	}
 
+	const auto &crop = request->controls().get(controls::ScalerCrop);
+	if (crop && useDewarper_) {
+		Rectangle appliedRect = crop.value();
+		int ret = dewarper_->setCrop(&data->mainPathStream_, &appliedRect);
+		if (!ret) {
+			if (appliedRect != crop.value()) {
+				/*
+				 * \todo How to handle these case?
+				 * Do we aim for pixel perfect set rectangles?
+				 */
+				LOG(RkISP1, Warning)
+					<< "Applied rectangle " << appliedRect.toString()
+					<< " differs from requested " << crop.value().toString();
+			}
+
+			info->scalerCrop = appliedRect;
+		}
+	}
+
 	data->frame_++;
 
 	return 0;
@@ -1110,6 +1191,12 @@  int PipelineHandlerRkISP1::updateControls(RkISP1CameraData *data)
 {
 	ControlInfoMap::Map rkisp1Controls;
 
+	if (dewarper_) {
+		auto [minCrop, maxCrop] = dewarper_->getCropBounds(&data->mainPathStream_);
+
+		rkisp1Controls[&controls::ScalerCrop] = ControlInfo(minCrop, maxCrop, maxCrop);
+	}
+
 	/* Add the IPA registered controls to list of camera controls. */
 	for (const auto &ipaControl : data->ipaControls_)
 		rkisp1Controls[ipaControl.first] = ipaControl.second;
@@ -1173,6 +1260,7 @@  int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
 
 bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
 {
+	std::shared_ptr<MediaDevice> dwpMediaDevice;
 	const MediaPad *pad;
 
 	DeviceMatch dm("rkisp1");
@@ -1237,6 +1325,26 @@  bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
 	stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
 	param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);
 
+	/* If dewarper is present, create its instance. */
+	DeviceMatch dwp("dw100");
+	dwp.add("dw100-source");
+	dwp.add("dw100-sink");
+
+	dwpMediaDevice = enumerator->search(dwp);
+	if (dwpMediaDevice) {
+		dewarper_ = std::make_unique<ConverterDW100>(std::move(dwpMediaDevice));
+		if (dewarper_->isValid()) {
+			dewarper_->outputBufferReady.connect(
+				this, &PipelineHandlerRkISP1::dewarpBufferReady);
+
+			LOG(RkISP1, Info) << "Using DW100 dewarper " << dewarper_->deviceNode();
+		} else {
+			LOG(RkISP1, Debug) << "Found DW100 dewarper " << dewarper_->deviceNode()
+					   << " but invalid";
+			dewarper_.reset();
+		}
+	}
+
 	/*
 	 * Enumerate all sensors connected to the ISP and create one
 	 * camera instance for each of them.
@@ -1283,7 +1391,7 @@  void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
 		return;
 
 	const FrameMetadata &metadata = buffer->metadata();
-	Request *request = buffer->request();
+	Request *request = info->request;
 
 	if (metadata.status != FrameMetadata::FrameCancelled) {
 		/*
@@ -1300,11 +1408,43 @@  void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
 				data->delayedCtrls_->get(metadata.sequence);
 			data->ipa_->processStatsBuffer(info->frame, 0, ctrls);
 		}
+
 	} else {
 		if (isRaw_)
 			info->metadataProcessed = true;
 	}
 
+	if (useDewarper_) {
+		/*
+		 * Queue input and output buffers to the dewarper. The output
+		 * buffers for the dewarper are the buffers of the request, supplied
+		 * by the application.
+		 */
+		int ret = dewarper_->queueBuffers(buffer, request->buffers());
+		if (ret < 0)
+			LOG(RkISP1, Error) << "Cannot queue buffers to dewarper: "
+					   << strerror(-ret);
+
+		return;
+	}
+
+	completeBuffer(request, buffer);
+	tryCompleteRequest(info);
+}
+
+void PipelineHandlerRkISP1::dewarpBufferReady(FrameBuffer *buffer)
+{
+	ASSERT(activeCamera_);
+	RkISP1CameraData *data = cameraData(activeCamera_);
+	Request *request = buffer->request();
+
+	RkISP1FrameInfo *info = data->frameInfo_.find(buffer->request());
+	if (!info)
+		return;
+
+	if (info->scalerCrop)
+		request->metadata().set(controls::ScalerCrop, info->scalerCrop.value());
+
 	completeBuffer(request, buffer);
 	tryCompleteRequest(info);
 }
diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
index c49017d1..c96ec1d6 100644
--- a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
+++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
@@ -56,7 +56,7 @@  const std::map<PixelFormat, uint32_t> formatToMediaBus = {
 
 RkISP1Path::RkISP1Path(const char *name, const Span<const PixelFormat> &formats,
 		       const Size &minResolution, const Size &maxResolution)
-	: name_(name), running_(false), formats_(formats),
+	: name_(name), running_(false), internalBufs_(false), formats_(formats),
 	  minResolution_(minResolution), maxResolution_(maxResolution),
 	  link_(nullptr)
 {
@@ -402,10 +402,12 @@  int RkISP1Path::start()
 	if (running_)
 		return -EBUSY;
 
-	/* \todo Make buffer count user configurable. */
-	ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
-	if (ret)
-		return ret;
+	if (!internalBufs_) {
+		/* \todo Make buffer count user configurable. */
+		ret = video_->importBuffers(RKISP1_BUFFER_COUNT);
+		if (ret)
+			return ret;
+	}
 
 	ret = video_->streamOn();
 	if (ret) {
diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.h b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
index 08edefec..b7fa82d0 100644
--- a/src/libcamera/pipeline/rkisp1/rkisp1_path.h
+++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
@@ -55,6 +55,19 @@  public:
 		return video_->exportBuffers(bufferCount, buffers);
 	}
 
+	int allocateBuffers(unsigned int bufferCount,
+			    std::vector<std::unique_ptr<FrameBuffer>> *buffers)
+	{
+		internalBufs_ = true;
+		return video_->allocateBuffers(bufferCount, buffers);
+	}
+
+	int releaseBuffers()
+	{
+		ASSERT(internalBufs_);
+		return video_->releaseBuffers();
+	}
+
 	int start();
 	void stop();
 
@@ -68,6 +81,7 @@  private:
 
 	const char *name_;
 	bool running_;
+	bool internalBufs_;
 
 	const Span<const PixelFormat> formats_;
 	std::set<PixelFormat> streamFormats_;