diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 0bab3bed..0119b1ca 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -281,6 +281,24 @@ RPiCameraConfiguration::RPiCameraConfiguration(const RPiCameraData *data)
 {
 }
 
+static bool validateColorSpace(ColorSpace &colorSpace)
+{
+	/* Coerce an unsupported colour space to JFIF; return true if it was valid. */
+	static const std::vector<ColorSpace> validColorSpaces = {
+		ColorSpace::JFIF,
+		ColorSpace::SMPTE170M,
+		ColorSpace::REC709,
+		ColorSpace::REC2020,
+		ColorSpace::VIDEO,
+	};
+	if (std::find(validColorSpaces.begin(), validColorSpaces.end(),
+		      colorSpace) != validColorSpaces.end())
+		return true;
+
+	colorSpace = ColorSpace::JFIF;
+	return false;
+}
+
 CameraConfiguration::Status RPiCameraConfiguration::validate()
 {
 	Status status = Valid;
@@ -346,6 +364,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	unsigned int rawCount = 0, outCount = 0, count = 0, maxIndex = 0;
 	std::pair<int, Size> outSize[2];
 	Size maxSize;
+	ColorSpace colorSpace; /* colour space for all non-raw output streams */
 	for (StreamConfiguration &cfg : config_) {
 		if (isRaw(cfg.pixelFormat)) {
 			/*
@@ -354,6 +373,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 			 */
 			V4L2VideoDevice::Formats fmts = data_->unicam_[Unicam::Image].dev()->formats();
 			V4L2DeviceFormat sensorFormat = findBestMode(fmts, cfg.size);
+			sensorFormat.colorSpace = ColorSpace::RAW;
 			int ret = data_->unicam_[Unicam::Image].dev()->tryFormat(&sensorFormat);
 			if (ret)
 				return Invalid;
@@ -392,6 +412,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 			if (maxSize < cfg.size) {
 				maxSize = cfg.size;
 				maxIndex = outCount;
+				colorSpace = cfg.colorSpace;
 			}
 			outCount++;
 		}
@@ -405,6 +426,10 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 		}
 	}
 
+	/* Ensure the output colour space (if present) is one we handle. */
+	if (outCount)
+		validateColorSpace(colorSpace);
+
 	/*
 	 * Now do any fixups needed. For the two ISP outputs, one stream must be
 	 * equal or smaller than the other in all dimensions.
@@ -446,10 +471,26 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 			status = Adjusted;
 		}
 
+		/* Output streams must share the same colour space. */
+		if (cfg.colorSpace != colorSpace) {
+			LOG(RPI, Warning) << "Output stream " << (i == maxIndex ? 0 : 1)
+					  << " colour space changed";
+			cfg.colorSpace = colorSpace;
+			status = Adjusted;
+		}
+
 		V4L2DeviceFormat format;
 		format.fourcc = dev->toV4L2PixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
 
+		/*
+		 * Request a sensible colour space. Note that "VIDEO" isn't a real
+		 * encoding, so substitute something else sensible.
+		 */
+		format.colorSpace = colorSpace;
+		if (format.colorSpace.encoding == ColorSpace::Encoding::VIDEO)
+			format.colorSpace.encoding = ColorSpace::Encoding::REC601;
+
 		int ret = dev->tryFormat(&format);
 		if (ret)
 			return Invalid;
@@ -475,6 +516,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 	V4L2DeviceFormat sensorFormat;
 	unsigned int bufferCount;
 	PixelFormat pixelFormat;
+	ColorSpace colorSpace;
 	V4L2VideoDevice::Formats fmts;
 	Size size;
 
@@ -490,6 +532,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			fmts = data->unicam_[Unicam::Image].dev()->formats();
 			sensorFormat = findBestMode(fmts, size);
 			pixelFormat = sensorFormat.fourcc.toPixelFormat();
+			colorSpace = ColorSpace::RAW;
 			ASSERT(pixelFormat.isValid());
 			bufferCount = 2;
 			rawCount++;
@@ -501,6 +544,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			/* Return the largest sensor resolution. */
 			size = data->sensor_->resolution();
 			bufferCount = 1;
+			colorSpace = ColorSpace::JFIF;
 			outCount++;
 			break;
 
@@ -517,6 +561,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			pixelFormat = formats::YUV420;
 			size = { 1920, 1080 };
 			bufferCount = 4;
+			colorSpace = ColorSpace::VIDEO;
 			outCount++;
 			break;
 
@@ -525,6 +570,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			pixelFormat = formats::ARGB8888;
 			size = { 800, 600 };
 			bufferCount = 4;
+			colorSpace = ColorSpace::JFIF;
 			outCount++;
 			break;
 
@@ -554,6 +600,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		StreamConfiguration cfg(formats);
 		cfg.size = size;
 		cfg.pixelFormat = pixelFormat;
+		cfg.colorSpace = colorSpace;
 		cfg.bufferCount = bufferCount;
 		config->addConfiguration(cfg);
 	}
@@ -575,6 +622,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 	Size maxSize, sensorSize;
 	unsigned int maxIndex = 0;
 	bool rawStream = false;
+	ColorSpace colorSpace; /* colour space for all non-raw output streams */
 
 	/*
 	 * Look for the RAW stream (if given) size as well as the largest
@@ -593,14 +641,40 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		} else {
 			if (cfg.size > maxSize) {
 				maxSize = config->at(i).size;
+				colorSpace = config->at(i).colorSpace;
 				maxIndex = i;
 			}
 		}
 	}
 
+	if (maxSize.isNull()) {
+		/*
+		 * No non-raw streams, so some will get made below. Doesn't matter
+		 * what colour space we assign to them.
+		 */
+		colorSpace = ColorSpace::JFIF;
+	} else {
+		/* Make sure we can handle this colour space. */
+		validateColorSpace(colorSpace);
+
+		/*
+		 * The "VIDEO" colour encoding means that we choose one of REC601,
+		 * REC709 or REC2020 automatically according to the resolution.
+		 */
+		if (colorSpace.encoding == ColorSpace::Encoding::VIDEO) {
+			if (maxSize.width >= 3840)
+				colorSpace.encoding = ColorSpace::Encoding::REC2020;
+			else if (maxSize.width >= 1280)
+				colorSpace.encoding = ColorSpace::Encoding::REC709;
+			else
+				colorSpace.encoding = ColorSpace::Encoding::REC601;
+		}
+	}
+
 	/* First calculate the best sensor mode we can use based on the user request. */
 	V4L2VideoDevice::Formats fmts = data->unicam_[Unicam::Image].dev()->formats();
 	V4L2DeviceFormat sensorFormat = findBestMode(fmts, rawStream ? sensorSize : maxSize);
+	sensorFormat.colorSpace = ColorSpace::RAW;
 
 	/*
 	 * Unicam image output format. The ISP input format gets set at start,
@@ -638,11 +712,18 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		StreamConfiguration &cfg = config->at(i);
 
 		if (isRaw(cfg.pixelFormat)) {
+			cfg.colorSpace = ColorSpace::RAW;
 			cfg.setStream(&data->unicam_[Unicam::Image]);
 			data->unicam_[Unicam::Image].setExternal(true);
 			continue;
 		}
 
+		/* All other streams share the same colour space. */
+		if (cfg.colorSpace != colorSpace) {
+			LOG(RPI, Warning) << "Stream " << i << " colour space changed";
+			cfg.colorSpace = colorSpace;
+		}
+
 		/* The largest resolution gets routed to the ISP Output 0 node. */
 		RPi::Stream *stream = i == maxIndex ? &data->isp_[Isp::Output0]
 						    : &data->isp_[Isp::Output1];
@@ -650,6 +731,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		V4L2PixelFormat fourcc = stream->dev()->toV4L2PixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
 		format.fourcc = fourcc;
+		format.colorSpace = cfg.colorSpace;
 
 		LOG(RPI, Debug) << "Setting " << stream->name() << " to "
 				<< format.toString();
@@ -689,6 +771,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		format = {};
 		format.size = maxSize;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420, false);
+		format.colorSpace = colorSpace;
 		ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
 		if (ret) {
 			LOG(RPI, Error)
@@ -718,6 +801,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		const Size limit = maxDimensions.boundedToAspectRatio(format.size);
 
 		output1Format.size = (format.size / 2).boundedTo(limit).alignedDownTo(2, 2);
+		output1Format.colorSpace = colorSpace;
 
 		LOG(RPI, Debug) << "Setting ISP Output1 (internal) to "
 				<< output1Format.toString();
