[RFC,11/12] pipeline: rpi: Support memory cameras
diff mbox series

Message ID 20250827090739.86955-12-david.plowman@raspberrypi.com
State New
Headers show
Series
  • Bayer Re-Processing
Related show

Commit Message

David Plowman Aug. 27, 2025, 9:07 a.m. UTC
Mostly small changes to support memory cameras (for Bayer reprocessing)
in the pipeline handler.

Firstly, we install a "CameraSensorMemory" as the sensor as soon as we
have the raw input buffer configuration, which helps the rest of the
code to work with fewer changes.

The PiSP platformConfigure method is refactored to split the Cfe and
Isp parts into separate functions. Memory cameras then omit
platformConfigureCfe but run platformConfigureIsp as before.

Signed-off-by: David Plowman <david.plowman@raspberrypi.com>
---
 .../pipeline/rpi/common/pipeline_base.cpp     |  34 ++++-
 .../pipeline/rpi/common/pipeline_base.h       |   4 +-
 src/libcamera/pipeline/rpi/pisp/pisp.cpp      | 141 ++++++++++++------
 3 files changed, 122 insertions(+), 57 deletions(-)

Patch
diff mbox series

diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
index e7b01f9f..31bacc7c 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
@@ -20,6 +20,7 @@ 
 #include <libcamera/property_ids.h>
 
 #include "libcamera/internal/camera_lens.h"
+#include "libcamera/internal/camera_sensor_memory.h"
 #include "libcamera/internal/ipa_manager.h"
 #include "libcamera/internal/v4l2_subdevice.h"
 
@@ -170,15 +171,9 @@  CameraConfiguration::Status RPiCameraConfiguration::validate()
 	status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
 
 	/*
-	 * Validate the requested transform against the sensor capabilities and
-	 * rotation and store the final combined transform that configure() will
-	 * need to apply to the sensor to save us working it out again.
+	 * Separate the raw and output streams first, to make it easier to
+	 * detect the raw reprocessing use case.
 	 */
-	Orientation requestedOrientation = orientation;
-	combinedTransform_ = data_->sensor_->computeTransform(&orientation);
-	if (orientation != requestedOrientation)
-		status = Adjusted;
-
 	rawStreams_.clear();
 	outStreams_.clear();
 	unsigned int rawStreamIndex = 0;
@@ -191,6 +186,29 @@  CameraConfiguration::Status RPiCameraConfiguration::validate()
 			outStreams_.emplace_back(outStreamIndex++, &cfg);
 	}
 
+	/*
+	 * For the reprocessing use case, make a "memory camera" to match the
+	 * raw input buffer. This will make all the subsequent code run more like
+	 * the regular sensor case.
+	 */
+	if (rawStreams_[0].cfg->isInput()) {
+		LOG(RPI, Debug) << "Raw reprocessing use case for " << *rawStreams_[0].cfg;
+		data_->sensor_ = std::make_unique<CameraSensorMemory>(*rawStreams_[0].cfg);
+		/* We can fill in the only sensor format we support! */
+		auto const mbusCode = data_->sensor_->mbusCodes()[0];
+		data_->sensorFormats_.emplace(mbusCode, data_->sensor_->sizes(mbusCode));
+	}
+
+	/*
+	 * Validate the requested transform against the sensor capabilities and
+	 * rotation and store the final combined transform that configure() will
+	 * need to apply to the sensor to save us working it out again.
+	 */
+	Orientation requestedOrientation = orientation;
+	combinedTransform_ = data_->sensor_->computeTransform(&orientation);
+	if (orientation != requestedOrientation)
+		status = Adjusted;
+
 	/* Sort the streams so the highest resolution is first. */
 	std::sort(rawStreams_.begin(), rawStreams_.end(),
 		  [](auto &l, auto &r) { return l.cfg->size > r.cfg->size; });
diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.h b/src/libcamera/pipeline/rpi/common/pipeline_base.h
index 397ad6f8..c53abaa7 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.h
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.h
@@ -246,7 +246,7 @@  private:
 class RPiCameraConfiguration final : public CameraConfiguration
 {
 public:
-	RPiCameraConfiguration(const CameraData *data)
+	RPiCameraConfiguration(CameraData *data)
 		: CameraConfiguration(), data_(data)
 	{
 	}
@@ -288,7 +288,7 @@  public:
 	std::optional<ColorSpace> rgbColorSpace_;
 
 private:
-	const CameraData *data_;
+	CameraData *data_;
 };
 
 } /* namespace RPi */
diff --git a/src/libcamera/pipeline/rpi/pisp/pisp.cpp b/src/libcamera/pipeline/rpi/pisp/pisp.cpp
index d18035ec..bb22e6d2 100644
--- a/src/libcamera/pipeline/rpi/pisp/pisp.cpp
+++ b/src/libcamera/pipeline/rpi/pisp/pisp.cpp
@@ -756,6 +756,10 @@  public:
 	bool adjustDeviceFormat(V4L2DeviceFormat &format) const;
 
 private:
+	int platformConfigureCfe(const RPi::RPiCameraConfiguration *rpiConfig,
+				 V4L2DeviceFormat &cfeFormat);
+	int platformConfigureIsp(const RPi::RPiCameraConfiguration *rpiConfig,
+				 V4L2DeviceFormat cfeFormat);
 	int platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig) override;
 
 	int platformConfigureIpa([[maybe_unused]] ipa::RPi::ConfigParams &params) override
@@ -994,6 +998,12 @@  PipelineHandlerPiSP::createMemoryCamera(DeviceEnumerator *enumerator,
 
 	std::shared_ptr<Camera> camera = platformCreateCamera(cameraData, nullptr, ispDevice);
 
+	int ret = pisp->loadPipelineConfiguration();
+	if (ret) {
+		LOG(RPI, Error) << "Unable to load pipeline configuration";
+		return nullptr;
+	}
+
 	return camera;
 }
 
@@ -1272,10 +1282,14 @@  PiSPCameraData::platformValidate(RPi::RPiCameraConfiguration *rpiConfig) const
 	}
 
 	if (!rawStreams.empty()) {
-		rawStreams[0].dev = cfe_[Cfe::Output0].dev();
-
 		StreamConfiguration *rawStream = rawStreams[0].cfg;
 		BayerFormat bayer = BayerFormat::fromPixelFormat(rawStream->pixelFormat);
+
+		if (rawStream->isInput())
+			rawStreams[0].dev = isp_[Isp::Input].dev();
+		else
+			rawStreams[0].dev = cfe_[Cfe::Output0].dev();
+
 		/*
 		 * We cannot output CSI2 packed or non 16-bit output from the frontend,
 		 * so signal the output as unpacked 16-bits in these cases.
@@ -1295,7 +1309,7 @@  PiSPCameraData::platformValidate(RPi::RPiCameraConfiguration *rpiConfig) const
 		}
 
 		rawStreams[0].format =
-			RPi::PipelineHandlerBase::toV4L2DeviceFormat(cfe_[Cfe::Output0].dev(), rawStream);
+			RPi::PipelineHandlerBase::toV4L2DeviceFormat(rawStreams[0].dev, rawStream);
 
 		computeOptimalStride(rawStreams[0].format);
 	}
@@ -1489,13 +1503,13 @@  bool PiSPCameraData::adjustDeviceFormat(V4L2DeviceFormat &format) const
 	return false;
 }
 
-int PiSPCameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig)
+int PiSPCameraData::platformConfigureCfe(const RPi::RPiCameraConfiguration *rpiConfig,
+					 V4L2DeviceFormat &cfeFormat)
 {
 	const std::vector<RPi::RPiCameraConfiguration::StreamParams> &rawStreams = rpiConfig->rawStreams_;
-	const std::vector<RPi::RPiCameraConfiguration::StreamParams> &outStreams = rpiConfig->outStreams_;
 	int ret;
 	V4L2VideoDevice *cfe = cfe_[Cfe::Output0].dev();
-	V4L2DeviceFormat cfeFormat;
+	V4L2DeviceFormat format;
 
 	/*
 	 * See which streams are requested, and route the user
@@ -1540,8 +1554,59 @@  int PiSPCameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConf
 	}
 
 	ret = cfe->setFormat(&cfeFormat);
-	if (ret)
+
+	/* CFE statistics output format. */
+	format = {};
+	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_RPI_FE_STATS);
+	ret = cfe_[Cfe::Stats].dev()->setFormat(&format);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to set format on CFE stats stream: "
+				<< format.toString();
+		return ret;
+	}
+
+	/* CFE config format. */
+	format = {};
+	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_RPI_FE_CFG);
+	ret = cfe_[Cfe::Config].dev()->setFormat(&format);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to set format on CFE config stream: "
+				<< format.toString();
 		return ret;
+	}
+
+	/*
+	 * Configure the CFE embedded data output format only if the sensor
+	 * supports it.
+	 */
+	V4L2SubdeviceFormat embeddedFormat;
+	if (sensorMetadata_) {
+		sensor_->device()->getFormat(1, &embeddedFormat);
+		format = {};
+		format.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);
+		format.planes[0].size = embeddedFormat.size.width * embeddedFormat.size.height;
+
+		LOG(RPI, Debug) << "Setting embedded data format " << format.toString();
+		ret = cfe_[Cfe::Embedded].dev()->setFormat(&format);
+		if (ret) {
+			LOG(RPI, Error) << "Failed to set format on CFE embedded: "
+					<< format;
+			return ret;
+		}
+	}
+
+	configureEntities(rpiConfig->sensorFormat_, embeddedFormat);
+	configureCfe();
+
+	return 0;
+}
+
+int PiSPCameraData::platformConfigureIsp(const RPi::RPiCameraConfiguration *rpiConfig,
+					 V4L2DeviceFormat cfeFormat)
+{
+	int ret;
+
+	const std::vector<RPi::RPiCameraConfiguration::StreamParams> &outStreams = rpiConfig->outStreams_;
 
 	/* Set the TDN and Stitch node formats in case they are turned on. */
 	isp_[Isp::TdnOutput].dev()->setFormat(&cfeFormat);
@@ -1677,53 +1742,35 @@  int PiSPCameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConf
 
 	beEnabled_ = beEnables & (PISP_BE_RGB_ENABLE_OUTPUT0 | PISP_BE_RGB_ENABLE_OUTPUT1);
 
-	/* CFE statistics output format. */
-	format = {};
-	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_RPI_FE_STATS);
-	ret = cfe_[Cfe::Stats].dev()->setFormat(&format);
-	if (ret) {
-		LOG(RPI, Error) << "Failed to set format on CFE stats stream: "
-				<< format.toString();
-		return ret;
-	}
+	if (beEnabled_)
+		configureBe(rpiConfig->yuvColorSpace_);
 
-	/* CFE config format. */
-	format = {};
-	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_RPI_FE_CFG);
-	ret = cfe_[Cfe::Config].dev()->setFormat(&format);
-	if (ret) {
-		LOG(RPI, Error) << "Failed to set format on CFE config stream: "
-				<< format.toString();
-		return ret;
-	}
+	return 0;
+}
+
+int PiSPCameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig)
+{
+	/* What we call the cfeFormat here is also the input format for the ISP (Back End). */
+	V4L2DeviceFormat cfeFormat;
 
 	/*
-	 * Configure the CFE embedded data output format only if the sensor
-	 * supports it.
+	 * First configure the CFE (if it's being used). In the case of memory cameras
+	 * (Bayer reprocessing), there's no CFE but the raw input stream should already
+	 * contain the correct ISP input format.
 	 */
-	V4L2SubdeviceFormat embeddedFormat;
-	if (sensorMetadata_) {
-		sensor_->device()->getFormat(1, &embeddedFormat);
-		format = {};
-		format.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);
-		format.planes[0].size = embeddedFormat.size.width * embeddedFormat.size.height;
-
-		LOG(RPI, Debug) << "Setting embedded data format " << format.toString();
-		ret = cfe_[Cfe::Embedded].dev()->setFormat(&format);
-		if (ret) {
-			LOG(RPI, Error) << "Failed to set format on CFE embedded: "
-					<< format;
+	if (cfe_[Cfe::Output0].dev()) {
+		/* Regular sensor. */
+		int ret = platformConfigureCfe(rpiConfig, cfeFormat);
+		if (ret)
 			return ret;
-		}
+	} else {
+		/* Memory camera. */
+		cfeFormat = rpiConfig->rawStreams_[0].format;
+		rpiConfig->rawStreams_[0].cfg->setStream(&isp_[Isp::Input]);
 	}
 
-	configureEntities(rpiConfig->sensorFormat_, embeddedFormat);
-	configureCfe();
-
-	if (beEnabled_)
-		configureBe(rpiConfig->yuvColorSpace_);
-
-	return 0;
+	/* Finally configure the back end ISP. */
+	return platformConfigureIsp(rpiConfig, cfeFormat);
 }
 
 void PiSPCameraData::platformStart()