[libcamera-devel,1/4] pipeline: ipa: raspberrypi: Pass exposure/gain values to IPA through controls

Message ID: 20210216103140.1077307-2-naush@raspberrypi.com
State: Changes Requested
Series
  • Raspberry Pi: Embedded data usage

Commit Message

Naushir Patuck Feb. 16, 2021, 10:31 a.m. UTC
When running with sensors that have no embedded data, the pipeline handler
would fill a dummy embedded data buffer with the gain/exposure values and
pass this buffer to the IPA together with the bayer buffer. The IPA would
then extract these values for use in the controller algorithms.
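
For reference, the mechanism being removed amounted to the following
(a condensed sketch of the hunk deleted from
RPiCameraData::unicamBufferDequeue() below; the lookup of the mapped
embedded buffer is elided and the snippet is not standalone code):

    /* Write the delayed exposure/gain values into the otherwise unused
     * embedded data buffer, so the IPA can parse them back out as if
     * they were real sensor metadata. */
    ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
    uint32_t *mem = reinterpret_cast<uint32_t *>(it->second.maps()[0].data());
    mem[0] = ctrl.get(V4L2_CID_EXPOSURE).get<int32_t>();
    mem[1] = ctrl.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();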

Rework this logic entirely by introducing a new RPiCameraData::BayerFrame
queue to replace the existing bayer queue. In addition to storing the
FrameBuffer pointer, this also stores all the controls tracked by
DelayedControls for that frame in a ControlList, including the exposure
and gain values. On signalling the RPi::IPA_EVENT_SIGNAL_ISP_PREPARE IPA
event, the pipeline handler now passes this ControlList from the
RPiCameraData::BayerFrame queue to the IPA.

The IPA now extracts the gain and exposure values from the ControlList
instead of using RPiController::MdParserRPi to parse the embedded data
buffer.
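
Condensing the hunks below into one place, the new flow is roughly as
follows (a sketch only; the real code lives in
RPiCameraData::unicamBufferDequeue(), RPiCameraData::tryRunPipeline()
and IPARPi::prepareISP()):

    /* Pipeline handler: keep the DelayedControls values with the bayer buffer. */
    struct BayerFrame {
        FrameBuffer *buffer;
        ControlList controls;
    };

    ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
    bayerQueue_.push({ buffer, std::move(ctrl) });

    /* Pipeline handler: hand the controls to the IPA with the prepare event. */
    op.operation = RPi::IPA_EVENT_SIGNAL_ISP_PREPARE;
    op.data.push_back(RPi::BufferMask::BAYER_DATA | bayerId);
    op.data.push_back(RPi::BufferMask::EMBEDDED_DATA | embeddedId);
    op.controls.emplace_back(std::move(bayerFrame.controls));

    /* IPA: if no embedded data buffer was supplied, or parsing it failed,
     * fall back to the exposure/gain values carried in the ControlList. */
    int32_t exposureLines = event.controls[0].get(V4L2_CID_EXPOSURE).get<int32_t>();
    int32_t gainCode = event.controls[0].get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
    fillDeviceStatus(exposureLines, gainCode, deviceStatus);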

Signed-off-by: Naushir Patuck <naush@raspberrypi.com>
---
 src/ipa/raspberrypi/raspberrypi.cpp           | 142 +++++++++++-------
 .../pipeline/raspberrypi/raspberrypi.cpp      |  62 ++++----
 2 files changed, 113 insertions(+), 91 deletions(-)

Patch

diff --git a/src/ipa/raspberrypi/raspberrypi.cpp b/src/ipa/raspberrypi/raspberrypi.cpp
index ef8b19baa754..30db1235953b 100644
--- a/src/ipa/raspberrypi/raspberrypi.cpp
+++ b/src/ipa/raspberrypi/raspberrypi.cpp
@@ -1,6 +1,6 @@ 
 /* SPDX-License-Identifier: BSD-2-Clause */
 /*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Ltd.
  *
  * rpi.cpp - Raspberry Pi Image Processing Algorithms
  */
@@ -99,9 +99,11 @@  private:
 	bool validateIspControls();
 	void queueRequest(const ControlList &controls);
 	void returnEmbeddedBuffer(unsigned int bufferId);
-	void prepareISP(unsigned int bufferId);
+	void prepareISP(const IPAOperationData &event);
 	void reportMetadata();
 	bool parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus);
+	void fillDeviceStatus(uint32_t exposureLines, uint32_t gainCode,
+			      struct DeviceStatus &deviceStatus);
 	void processStats(unsigned int bufferId);
 	void applyFrameDurations(double minFrameDuration, double maxFrameDuration);
 	void applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls);
@@ -451,15 +453,14 @@  void IPARPi::processEvent(const IPAOperationData &event)
 	}
 
 	case RPi::IPA_EVENT_SIGNAL_ISP_PREPARE: {
-		unsigned int embeddedbufferId = event.data[0];
-		unsigned int bayerbufferId = event.data[1];
+		unsigned int bayerbufferId = event.data[0];
 
 		/*
 		 * At start-up, or after a mode-switch, we may want to
 		 * avoid running the control algos for a few frames in case
 		 * they are "unreliable".
 		 */
-		prepareISP(embeddedbufferId);
+		prepareISP(event);
 		frameCount_++;
 
 		/* Ready to push the input buffer into the ISP. */
@@ -939,70 +940,88 @@  void IPARPi::returnEmbeddedBuffer(unsigned int bufferId)
 	queueFrameAction.emit(0, op);
 }
 
-void IPARPi::prepareISP(unsigned int bufferId)
+void IPARPi::prepareISP(const IPAOperationData &event)
 {
 	struct DeviceStatus deviceStatus = {};
-	bool success = parseEmbeddedData(bufferId, deviceStatus);
+	bool success = false;
 
-	/* Done with embedded data now, return to pipeline handler asap. */
-	returnEmbeddedBuffer(bufferId);
+	if (event.data.size() == 2) {
+		/*
+		 * Pipeline handler has supplied us with an embedded data buffer,
+		 * so parse it and extract the exposure and gain.
+		 */
+		unsigned int bufferId = event.data[1];
+		success = parseEmbeddedData(bufferId, deviceStatus);
 
-	if (success) {
-		ControlList ctrls(ispCtrls_);
+		/* Done with embedded data now, return to pipeline handler asap. */
+		returnEmbeddedBuffer(bufferId);
+	}
 
-		rpiMetadata_.Clear();
-		rpiMetadata_.Set("device.status", deviceStatus);
-		controller_.Prepare(&rpiMetadata_);
+	if (!success) {
+		/*
+		 * Pipeline handler has not supplied an embedded data buffer,
+		 * or embedded data buffer parsing has failed for some reason,
+		 * so pull the exposure and gain values from the control list.
+		 */
+		int32_t exposureLines = event.controls[0].get(V4L2_CID_EXPOSURE).get<int32_t>();
+		int32_t gainCode = event.controls[0].get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
+		fillDeviceStatus(exposureLines, gainCode, deviceStatus);
+	}
 
-		/* Lock the metadata buffer to avoid constant locks/unlocks. */
-		std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
+	ControlList ctrls(ispCtrls_);
 
-		AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
-		if (awbStatus)
-			applyAWB(awbStatus, ctrls);
+	rpiMetadata_.Clear();
+	rpiMetadata_.Set("device.status", deviceStatus);
+	controller_.Prepare(&rpiMetadata_);
 
-		CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
-		if (ccmStatus)
-			applyCCM(ccmStatus, ctrls);
+	/* Lock the metadata buffer to avoid constant locks/unlocks. */
+	std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
 
-		AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
-		if (dgStatus)
-			applyDG(dgStatus, ctrls);
+	AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
+	if (awbStatus)
+		applyAWB(awbStatus, ctrls);
 
-		AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
-		if (lsStatus)
-			applyLS(lsStatus, ctrls);
+	CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
+	if (ccmStatus)
+		applyCCM(ccmStatus, ctrls);
 
-		ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
-		if (contrastStatus)
-			applyGamma(contrastStatus, ctrls);
+	AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
+	if (dgStatus)
+		applyDG(dgStatus, ctrls);
 
-		BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
-		if (blackLevelStatus)
-			applyBlackLevel(blackLevelStatus, ctrls);
+	AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
+	if (lsStatus)
+		applyLS(lsStatus, ctrls);
 
-		GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
-		if (geqStatus)
-			applyGEQ(geqStatus, ctrls);
+	ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
+	if (contrastStatus)
+		applyGamma(contrastStatus, ctrls);
 
-		DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
-		if (denoiseStatus)
-			applyDenoise(denoiseStatus, ctrls);
+	BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
+	if (blackLevelStatus)
+		applyBlackLevel(blackLevelStatus, ctrls);
 
-		SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
-		if (sharpenStatus)
-			applySharpen(sharpenStatus, ctrls);
+	GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
+	if (geqStatus)
+		applyGEQ(geqStatus, ctrls);
 
-		DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
-		if (dpcStatus)
-			applyDPC(dpcStatus, ctrls);
+	DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
+	if (denoiseStatus)
+		applyDenoise(denoiseStatus, ctrls);
 
-		if (!ctrls.empty()) {
-			IPAOperationData op;
-			op.operation = RPi::IPA_ACTION_V4L2_SET_ISP;
-			op.controls.push_back(ctrls);
-			queueFrameAction.emit(0, op);
-		}
+	SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
+	if (sharpenStatus)
+		applySharpen(sharpenStatus, ctrls);
+
+	DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
+	if (dpcStatus)
+		applyDPC(dpcStatus, ctrls);
+
+	if (!ctrls.empty()) {
+		IPAOperationData op;
+		op.operation = RPi::IPA_ACTION_V4L2_SET_ISP;
+		op.controls.push_back(ctrls);
+		queueFrameAction.emit(0, op);
 	}
 }
 
@@ -1019,6 +1038,7 @@  bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &devic
 	RPiController::MdParser::Status status = helper_->Parser().Parse(mem.data());
 	if (status != RPiController::MdParser::Status::OK) {
 		LOG(IPARPI, Error) << "Embedded Buffer parsing failed, error " << status;
+		return false;
 	} else {
 		uint32_t exposureLines, gainCode;
 		if (helper_->Parser().GetExposureLines(exposureLines) != RPiController::MdParser::Status::OK) {
@@ -1026,21 +1046,29 @@  bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &devic
 			return false;
 		}
 
-		deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
 		if (helper_->Parser().GetGainCode(gainCode) != RPiController::MdParser::Status::OK) {
 			LOG(IPARPI, Error) << "Gain failed";
 			return false;
 		}
 
-		deviceStatus.analogue_gain = helper_->Gain(gainCode);
-		LOG(IPARPI, Debug) << "Metadata - Exposure : "
-				   << deviceStatus.shutter_speed << " Gain : "
-				   << deviceStatus.analogue_gain;
+		fillDeviceStatus(exposureLines, gainCode, deviceStatus);
 	}
 
 	return true;
 }
 
+void IPARPi::fillDeviceStatus(uint32_t exposureLines, uint32_t gainCode,
+			      struct DeviceStatus &deviceStatus)
+{
+	deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
+	deviceStatus.analogue_gain = helper_->Gain(gainCode);
+
+	LOG(IPARPI, Debug) << "Metadata - Exposure : "
+			   << deviceStatus.shutter_speed
+			   << " Gain : "
+			   << deviceStatus.analogue_gain;
+}
+
 void IPARPi::processStats(unsigned int bufferId)
 {
 	auto it = buffers_.find(bufferId);
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index e47646818c73..7e744ce13172 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -1,6 +1,6 @@ 
 /* SPDX-License-Identifier: LGPL-2.1-or-later */
 /*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Ltd.
  *
  * raspberrypi.cpp - Pipeline handler for Raspberry Pi devices
  */
@@ -188,7 +188,13 @@  public:
 	 */
 	enum class State { Stopped, Idle, Busy, IpaComplete };
 	State state_;
-	std::queue<FrameBuffer *> bayerQueue_;
+
+	struct BayerFrame {
+		FrameBuffer *buffer;
+		ControlList controls;
+	};
+
+	std::queue<BayerFrame> bayerQueue_;
 	std::queue<FrameBuffer *> embeddedQueue_;
 	std::deque<Request *> requestQueue_;
 
@@ -213,7 +219,7 @@  public:
 private:
 	void checkRequestCompleted();
 	void tryRunPipeline();
-	bool findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *&embeddedBuffer);
+	bool findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&embeddedBuffer);
 
 	unsigned int ispOutputCount_;
 };
@@ -1386,29 +1392,14 @@  void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)
 			<< ", timestamp: " << buffer->metadata().timestamp;
 
 	if (stream == &unicam_[Unicam::Image]) {
-		bayerQueue_.push(buffer);
-	} else {
-		embeddedQueue_.push(buffer);
-
-		ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
-
 		/*
-		 * Sensor metadata is unavailable, so put the expected ctrl
-		 * values (accounting for the staggered delays) into the empty
-		 * metadata buffer.
+		 * Look up the sensor controls used for this frame sequence from
+		 * DelayedControls and queue them along with the frame buffer.
 		 */
-		if (!sensorMetadata_) {
-			unsigned int bufferId = unicam_[Unicam::Embedded].getBufferId(buffer);
-			auto it = mappedEmbeddedBuffers_.find(bufferId);
-			if (it != mappedEmbeddedBuffers_.end()) {
-				uint32_t *mem = reinterpret_cast<uint32_t *>(it->second.maps()[0].data());
-				mem[0] = ctrl.get(V4L2_CID_EXPOSURE).get<int32_t>();
-				mem[1] = ctrl.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
-			} else {
-				LOG(RPI, Warning) << "Failed to find embedded buffer "
-						  << bufferId;
-			}
-		}
+		ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
+		bayerQueue_.push({ buffer, std::move(ctrl) });
+	} else {
+		embeddedQueue_.push(buffer);
 	}
 
 	handleState();
@@ -1662,7 +1653,8 @@  void RPiCameraData::applyScalerCrop(const ControlList &controls)
 
 void RPiCameraData::tryRunPipeline()
 {
-	FrameBuffer *bayerBuffer, *embeddedBuffer;
+	FrameBuffer *embeddedBuffer;
+	BayerFrame bayerFrame;
 	IPAOperationData op;
 
 	/* If any of our request or buffer queues are empty, we cannot proceed. */
@@ -1670,7 +1662,7 @@  void RPiCameraData::tryRunPipeline()
 	    bayerQueue_.empty() || embeddedQueue_.empty())
 		return;
 
-	if (!findMatchingBuffers(bayerBuffer, embeddedBuffer))
+	if (!findMatchingBuffers(bayerFrame, embeddedBuffer))
 		return;
 
 	/* Take the first request from the queue and action the IPA. */
@@ -1691,7 +1683,7 @@  void RPiCameraData::tryRunPipeline()
 	/* Set our state to say the pipeline is active. */
 	state_ = State::Busy;
 
-	unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerBuffer);
+	unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerFrame.buffer);
 	unsigned int embeddedId = unicam_[Unicam::Embedded].getBufferId(embeddedBuffer);
 
 	LOG(RPI, Debug) << "Signalling RPi::IPA_EVENT_SIGNAL_ISP_PREPARE:"
@@ -1699,26 +1691,28 @@  void RPiCameraData::tryRunPipeline()
 			<< " Embedded buffer id: " << embeddedId;
 
 	op.operation = RPi::IPA_EVENT_SIGNAL_ISP_PREPARE;
-	op.data = { RPi::BufferMask::EMBEDDED_DATA | embeddedId,
-		    RPi::BufferMask::BAYER_DATA | bayerId };
+	op.data.push_back(RPi::BufferMask::BAYER_DATA | bayerId);
+	op.data.push_back(RPi::BufferMask::EMBEDDED_DATA | embeddedId);
+	op.controls.emplace_back(std::move(bayerFrame.controls));
+
 	ipa_->processEvent(op);
 }
 
-bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *&embeddedBuffer)
+bool RPiCameraData::findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&embeddedBuffer)
 {
 	unsigned int embeddedRequeueCount = 0, bayerRequeueCount = 0;
 
 	/* Loop until we find a matching bayer and embedded data buffer. */
 	while (!bayerQueue_.empty()) {
 		/* Start with the front of the bayer queue. */
-		bayerBuffer = bayerQueue_.front();
+		bayerFrame = bayerQueue_.front();
 
 		/*
 		 * Find the embedded data buffer with a matching timestamp to pass to
 		 * the IPA. Any embedded buffers with a timestamp lower than the
 		 * current bayer buffer will be removed and re-queued to the driver.
 		 */
-		uint64_t ts = bayerBuffer->metadata().timestamp;
+		uint64_t ts = bayerFrame.buffer->metadata().timestamp;
 		embeddedBuffer = nullptr;
 		while (!embeddedQueue_.empty()) {
 			FrameBuffer *b = embeddedQueue_.front();
@@ -1748,7 +1742,7 @@  bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *
 				 * the front of the queue. This buffer is now orphaned, so requeue
 				 * it back to the device.
 				 */
-				unicam_[Unicam::Image].queueBuffer(bayerQueue_.front());
+				unicam_[Unicam::Image].queueBuffer(bayerQueue_.front().buffer);
 				bayerQueue_.pop();
 				bayerRequeueCount++;
 				LOG(RPI, Warning) << "Dropping unmatched input frame in stream "
@@ -1766,7 +1760,7 @@  bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *
 
 				LOG(RPI, Warning) << "Flushing bayer stream!";
 				while (!bayerQueue_.empty()) {
-					unicam_[Unicam::Image].queueBuffer(bayerQueue_.front());
+					unicam_[Unicam::Image].queueBuffer(bayerQueue_.front().buffer);
 					bayerQueue_.pop();
 				}
 				flushedBuffers = true;