diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
index 76bcb2a4..2f02ec4c 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
@@ -20,6 +20,7 @@
 #include <libcamera/property_ids.h>
 
 #include "libcamera/internal/camera_lens.h"
+#include "libcamera/internal/camera_sensor_memory.h"
 #include "libcamera/internal/ipa_manager.h"
 #include "libcamera/internal/v4l2_subdevice.h"
 
@@ -170,15 +171,9 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
 
 	/*
-	 * Validate the requested transform against the sensor capabilities and
-	 * rotation and store the final combined transform that configure() will
-	 * need to apply to the sensor to save us working it out again.
+	 * Separate the raw and output streams first, to make it easier to
+	 * detect the raw reprocessing use case.
 	 */
-	Orientation requestedOrientation = orientation;
-	combinedTransform_ = data_->sensor_->computeTransform(&orientation);
-	if (orientation != requestedOrientation)
-		status = Adjusted;
-
 	rawStreams_.clear();
 	outStreams_.clear();
 	unsigned int rawStreamIndex = 0;
@@ -191,6 +186,31 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 			outStreams_.emplace_back(outStreamIndex++, &cfg);
 	}
 
+	/*
+	 * For the reprocessing use case, make a "memory camera" to match the
+	 * raw input buffer. This will make all the subsequent code run more like
+	 * the regular sensor case.
+	 */
+	if (!rawStreams_.empty() && rawStreams_[0].cfg->isInput()) {
+		LOG(RPI, Debug) << "Raw reprocessing use case for " << *rawStreams_[0].cfg;
+		const StreamConfiguration &rawInput = *rawStreams_[0].cfg;
+		BayerFormat bayerFormat = BayerFormat::fromPixelFormat(rawInput.pixelFormat);
+		unsigned int mbusCode = PipelineHandlerBase::bayerToMbusCode(bayerFormat);
+		data_->sensor_ = std::make_unique<CameraSensorMemory>(rawInput, mbusCode);
+		/* We can fill in the only sensor format we support! */
+		data_->sensorFormats_.emplace(mbusCode, data_->sensor_->sizes(mbusCode));
+	}
+
+	/*
+	 * Validate the requested transform against the sensor capabilities and
+	 * rotation and store the final combined transform that configure() will
+	 * need to apply to the sensor to save us working it out again.
+	 */
+	Orientation requestedOrientation = orientation;
+	combinedTransform_ = data_->sensor_->computeTransform(&orientation);
+	if (orientation != requestedOrientation)
+		status = Adjusted;
+
 	/* Sort the streams so the highest resolution is first. */
 	std::sort(rawStreams_.begin(), rawStreams_.end(),
 		  [](auto &l, auto &r) { return l.cfg->size > r.cfg->size; });
diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.h b/src/libcamera/pipeline/rpi/common/pipeline_base.h
index 9453ae7e..76a269bd 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.h
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.h
@@ -250,7 +250,7 @@ private:
 class RPiCameraConfiguration final : public CameraConfiguration
 {
 public:
-	RPiCameraConfiguration(const CameraData *data)
+	RPiCameraConfiguration(CameraData *data)
 		: CameraConfiguration(), data_(data)
 	{
 	}
@@ -292,7 +292,7 @@ public:
 	std::optional<ColorSpace> rgbColorSpace_;
 
 private:
-	const CameraData *data_;
+	CameraData *data_;
 };
 
 } /* namespace RPi */
diff --git a/src/libcamera/pipeline/rpi/pisp/pisp.cpp b/src/libcamera/pipeline/rpi/pisp/pisp.cpp
index 726ab063..da3f5f40 100644
--- a/src/libcamera/pipeline/rpi/pisp/pisp.cpp
+++ b/src/libcamera/pipeline/rpi/pisp/pisp.cpp
@@ -756,6 +756,10 @@ public:
 	bool adjustDeviceFormat(V4L2DeviceFormat &format) const;
 
 private:
+	int platformConfigureCfe(const RPi::RPiCameraConfiguration *rpiConfig,
+				 V4L2DeviceFormat &cfeFormat);
+	int platformConfigureIsp(const RPi::RPiCameraConfiguration *rpiConfig,
+				 V4L2DeviceFormat cfeFormat);
 	int platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig) override;
 
 	int platformConfigureIpa([[maybe_unused]] ipa::RPi::ConfigParams &params) override
@@ -995,6 +999,12 @@ PipelineHandlerPiSP::createMemoryCamera(DeviceEnumerator *enumerator,
 
 	std::shared_ptr<Camera> camera = platformCreateCamera(cameraData, nullptr, ispDevice.get());
 
+	int ret = pisp->loadPipelineConfiguration();
+	if (ret) {
+		LOG(RPI, Error) << "Unable to load pipeline configuration";
+		return nullptr;
+	}
+
 	return camera;
 }
 
@@ -1274,10 +1284,14 @@ PiSPCameraData::platformValidate(RPi::RPiCameraConfiguration *rpiConfig) const
 	}
 
 	if (!rawStreams.empty()) {
-		rawStreams[0].dev = cfe_[Cfe::Output0].dev();
-
 		StreamConfiguration *rawStream = rawStreams[0].cfg;
 		BayerFormat bayer = BayerFormat::fromPixelFormat(rawStream->pixelFormat);
+
+		if (rawStream->isInput())
+			rawStreams[0].dev = isp_[Isp::Input].dev();
+		else
+			rawStreams[0].dev = cfe_[Cfe::Output0].dev();
+
 		/*
 		 * We cannot output CSI2 packed or non 16-bit output from the frontend,
 		 * so signal the output as unpacked 16-bits in these cases.
@@ -1297,7 +1311,7 @@ PiSPCameraData::platformValidate(RPi::RPiCameraConfiguration *rpiConfig) const
 		}
 
 		rawStreams[0].format =
-			RPi::PipelineHandlerBase::toV4L2DeviceFormat(cfe_[Cfe::Output0].dev(), rawStream);
+			RPi::PipelineHandlerBase::toV4L2DeviceFormat(rawStreams[0].dev, rawStream);
 
 		computeOptimalStride(rawStreams[0].format);
 	}
@@ -1491,13 +1505,13 @@ bool PiSPCameraData::adjustDeviceFormat(V4L2DeviceFormat &format) const
 	return false;
 }
 
-int PiSPCameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig)
+int PiSPCameraData::platformConfigureCfe(const RPi::RPiCameraConfiguration *rpiConfig,
+					 V4L2DeviceFormat &cfeFormat)
 {
 	const std::vector<RPi::RPiCameraConfiguration::StreamParams> &rawStreams = rpiConfig->rawStreams_;
-	const std::vector<RPi::RPiCameraConfiguration::StreamParams> &outStreams = rpiConfig->outStreams_;
 	int ret;
 	V4L2VideoDevice *cfe = cfe_[Cfe::Output0].dev();
-	V4L2DeviceFormat cfeFormat;
+	V4L2DeviceFormat format;
 
 	/*
 	 * See which streams are requested, and route the user
@@ -1542,8 +1556,61 @@ int PiSPCameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConf
 	}
 
 	ret = cfe->setFormat(&cfeFormat);
-	if (ret)
+	if (ret)
+		return ret;
+
+	/* CFE statistics output format. */
+	format = {};
+	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_RPI_FE_STATS);
+	ret = cfe_[Cfe::Stats].dev()->setFormat(&format);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to set format on CFE stats stream: "
+				<< format.toString();
+		return ret;
+	}
+
+	/* CFE config format. */
+	format = {};
+	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_RPI_FE_CFG);
+	ret = cfe_[Cfe::Config].dev()->setFormat(&format);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to set format on CFE config stream: "
+				<< format.toString();
 		return ret;
+	}
+
+	/*
+	 * Configure the CFE embedded data output format only if the sensor
+	 * supports it.
+	 */
+	V4L2SubdeviceFormat embeddedFormat;
+	if (sensorMetadata_) {
+		sensor_->device()->getFormat(1, &embeddedFormat);
+		format = {};
+		format.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);
+		format.planes[0].size = embeddedFormat.size.width * embeddedFormat.size.height;
+
+		LOG(RPI, Debug) << "Setting embedded data format " << format.toString();
+		ret = cfe_[Cfe::Embedded].dev()->setFormat(&format);
+		if (ret) {
+			LOG(RPI, Error) << "Failed to set format on CFE embedded: "
+					<< format;
+			return ret;
+		}
+	}
+
+	configureEntities(rpiConfig->sensorFormat_, embeddedFormat);
+	configureCfe();
+
+	return 0;
+}
+
+int PiSPCameraData::platformConfigureIsp(const RPi::RPiCameraConfiguration *rpiConfig,
+					 V4L2DeviceFormat cfeFormat)
+{
+	int ret;
+
+	const std::vector<RPi::RPiCameraConfiguration::StreamParams> &outStreams = rpiConfig->outStreams_;
 
 	/* Set the TDN and Stitch node formats in case they are turned on. */
 	isp_[Isp::TdnOutput].dev()->setFormat(&cfeFormat);
@@ -1679,53 +1744,35 @@ int PiSPCameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConf
 
 	beEnabled_ = beEnables & (PISP_BE_RGB_ENABLE_OUTPUT0 | PISP_BE_RGB_ENABLE_OUTPUT1);
 
-	/* CFE statistics output format. */
-	format = {};
-	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_RPI_FE_STATS);
-	ret = cfe_[Cfe::Stats].dev()->setFormat(&format);
-	if (ret) {
-		LOG(RPI, Error) << "Failed to set format on CFE stats stream: "
-				<< format.toString();
-		return ret;
-	}
+	if (beEnabled_)
+		configureBe(rpiConfig->yuvColorSpace_);
 
-	/* CFE config format. */
-	format = {};
-	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_RPI_FE_CFG);
-	ret = cfe_[Cfe::Config].dev()->setFormat(&format);
-	if (ret) {
-		LOG(RPI, Error) << "Failed to set format on CFE config stream: "
-				<< format.toString();
-		return ret;
-	}
+	return 0;
+}
+
+int PiSPCameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig)
+{
+	/* What we call the cfeFormat here is also the input format for the ISP (Back End). */
+	V4L2DeviceFormat cfeFormat;
 
 	/*
-	 * Configure the CFE embedded data output format only if the sensor
-	 * supports it.
+	 * First configure the CFE (if it's being used). In the case of memory cameras
+	 * (Bayer reprocessing), there's no CFE but the raw input stream should already
+	 * contain the correct ISP input format.
 	 */
-	V4L2SubdeviceFormat embeddedFormat;
-	if (sensorMetadata_) {
-		sensor_->device()->getFormat(1, &embeddedFormat);
-		format = {};
-		format.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);
-		format.planes[0].size = embeddedFormat.size.width * embeddedFormat.size.height;
-
-		LOG(RPI, Debug) << "Setting embedded data format " << format.toString();
-		ret = cfe_[Cfe::Embedded].dev()->setFormat(&format);
-		if (ret) {
-			LOG(RPI, Error) << "Failed to set format on CFE embedded: "
-					<< format;
+	if (cfe_[Cfe::Output0].dev()) {
+		/* Regular sensor. */
+		int ret = platformConfigureCfe(rpiConfig, cfeFormat);
+		if (ret)
 			return ret;
-		}
+	} else {
+		/* Memory camera. */
+		cfeFormat = rpiConfig->rawStreams_[0].format;
+		rpiConfig->rawStreams_[0].cfg->setStream(&isp_[Isp::Input]);
 	}
 
-	configureEntities(rpiConfig->sensorFormat_, embeddedFormat);
-	configureCfe();
-
-	if (beEnabled_)
-		configureBe(rpiConfig->yuvColorSpace_);
-
-	return 0;
+	/* Finally configure the back end ISP. */
+	return platformConfigureIsp(rpiConfig, cfeFormat);
 }
 
 void PiSPCameraData::platformStart()
