[libcamera-devel,v9,8/8] libcamera: pipeline: raspberrypi: Support color spaces

Message ID: 20211206105032.13876-9-david.plowman@raspberrypi.com
State: Superseded
Series: Colour spaces

Commit Message

David Plowman Dec. 6, 2021, 10:50 a.m. UTC
The Raspberry Pi pipeline handler now sets color spaces correctly.

In generateConfiguration() it sets them to reasonable default values
based on the stream role.

validate() now calls validateColorSpaces() to ensure that the
requested color spaces are sensible, before proceeding to check what
the hardware can deliver.

Signed-off-by: David Plowman <david.plowman@raspberrypi.com>
---
 .../pipeline/raspberrypi/raspberrypi.cpp      | 43 +++++++++++++++++++
 1 file changed, 43 insertions(+)
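
For context, the role-based defaults are what an application sees straight out of
generateConfiguration(), and it may override them before validation. A hypothetical
application-side sketch, not part of this patch, assuming the predefined ColorSpace
constants introduced earlier in this series:

#include <errno.h>
#include <memory>

#include <libcamera/libcamera.h>

using namespace libcamera;

/*
 * Hypothetical helper: request a video stream and override its colour
 * space. The camera is assumed to have been acquired already.
 */
static int configureVideo(std::shared_ptr<Camera> camera)
{
	std::unique_ptr<CameraConfiguration> config =
		camera->generateConfiguration({ StreamRole::VideoRecording });
	if (!config || config->empty())
		return -EINVAL;

	StreamConfiguration &cfg = config->at(0);

	/* generateConfiguration() has defaulted this to ColorSpace::Rec709. */
	cfg.colorSpace = ColorSpace::Smpte170m;

	/* validate() may adjust the colour space to what the ISP can deliver. */
	if (config->validate() == CameraConfiguration::Invalid)
		return -EINVAL;

	return camera->configure(config.get());
}

validate() funnels any such override through validateColorSpaces() and the V4L2
tryFormat() path shown in the patch below, so cfg.colorSpace ends up reflecting
what the hardware will actually produce.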

Comments

Laurent Pinchart Dec. 7, 2021, 1:57 p.m. UTC | #1
Hi David,

Thank you for the patch.

On Mon, Dec 06, 2021 at 10:50:31AM +0000, David Plowman wrote:
> The Raspberry Pi pipeline handler now sets color spaces correctly.
> 
> In generateConfiguration() it sets them to reasonable default values
> based on the stream role.
> 
> validate() now calls validateColorSpaces() to ensure that the
> requested color spaces are sensible, before proceeding to check what
> the hardware can deliver.
> 
> Signed-off-by: David Plowman <david.plowman@raspberrypi.com>
> ---
>  .../pipeline/raspberrypi/raspberrypi.cpp      | 43 +++++++++++++++++++
>  1 file changed, 43 insertions(+)
> 
> diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> index 321b72ad..eb4fbf90 100644
> --- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> +++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> @@ -97,6 +97,7 @@ V4L2DeviceFormat toV4L2DeviceFormat(const V4L2SubdeviceFormat &format,
>  
>  	deviceFormat.fourcc = V4L2PixelFormat::fromPixelFormat(pix);
>  	deviceFormat.size = format.size;
> +	deviceFormat.colorSpace = format.colorSpace;
>  	return deviceFormat;
>  }
>  
> @@ -133,6 +134,7 @@ V4L2SubdeviceFormat findBestFormat(const SensorFormats &formatsMap, const Size &
>  {
>  	double bestScore = std::numeric_limits<double>::max(), score;
>  	V4L2SubdeviceFormat bestFormat;
> +	bestFormat.colorSpace = ColorSpace::Raw;
>  
>  	constexpr float penaltyAr = 1500.0;
>  	constexpr float penaltyBitDepth = 500.0;
> @@ -330,6 +332,8 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>  	if (config_.empty())
>  		return Invalid;
>  
> +	status = validateColorSpaces(true);

Wouldn't it be better to validate colour spaces after pixel formats have
been validated? Otherwise your isRaw() check during colour space
validation may not match the pixel format after validation.
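
A minimal sketch of that ordering, assuming validateColorSpaces() keeps the
signature used in this series and the rest of validate() is unchanged:

CameraConfiguration::Status RPiCameraConfiguration::validate()
{
	Status status = Valid;

	if (config_.empty())
		return Invalid;

	/* ... existing pixel format, size and stride validation ... */

	/*
	 * Validate the colour spaces only once the pixel formats have been
	 * settled, so that any isRaw() check inside validateColorSpaces()
	 * sees the formats as actually adjusted above.
	 */
	Status csStatus = validateColorSpaces(true);
	if (csStatus == Invalid)
		return Invalid;
	if (csStatus == Adjusted)
		status = Adjusted;

	return status;
}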

> +
>  	/*
>  	 * What if the platform has a non-90 degree rotation? We can't even
>  	 * "adjust" the configuration and carry on. Alternatively, raising an
> @@ -497,11 +501,23 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>  		V4L2DeviceFormat format;
>  		format.fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
>  		format.size = cfg.size;
> +		format.colorSpace = cfg.colorSpace;

I'd add a blank line here.

> +		LOG(RPI, Debug)
> +			<< "Try color space " << ColorSpace::toString(cfg.colorSpace);
>  
>  		int ret = dev->tryFormat(&format);
>  		if (ret)
>  			return Invalid;
>  
> +		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
> +			status = Adjusted;
> +			LOG(RPI, Warning)
> +				<< "Color space changed from "
> +				<< ColorSpace::toString(cfg.colorSpace) << " to "
> +				<< ColorSpace::toString(format.colorSpace);

Warning or Debug?

> +		}

And here too.

> +		cfg.colorSpace = format.colorSpace;
> +
>  		cfg.stride = format.planes[0].bpl;
>  		cfg.frameSize = format.planes[0].size;
>  
> @@ -525,6 +541,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  	PixelFormat pixelFormat;
>  	V4L2VideoDevice::Formats fmts;
>  	Size size;
> +	std::optional<ColorSpace> colorSpace;
>  
>  	if (roles.empty())
>  		return config;
> @@ -539,6 +556,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  			pixelFormat = mbusCodeToPixelFormat(sensorFormat.mbus_code,
>  							    BayerFormat::Packing::CSI2);
>  			ASSERT(pixelFormat.isValid());
> +			colorSpace = ColorSpace::Raw;
>  			bufferCount = 2;
>  			rawCount++;
>  			break;
> @@ -546,6 +564,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  		case StreamRole::StillCapture:
>  			fmts = data->isp_[Isp::Output0].dev()->formats();
>  			pixelFormat = formats::NV12;
> +			colorSpace = ColorSpace::Jpeg;
>  			/* Return the largest sensor resolution. */
>  			size = data->sensor_->resolution();
>  			bufferCount = 1;
> @@ -563,6 +582,8 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  			 */
>  			fmts = data->isp_[Isp::Output0].dev()->formats();
>  			pixelFormat = formats::YUV420;
> +			/* This will be reasonable for many applications. */
> +			colorSpace = ColorSpace::Rec709;

Why does NV12 produce Jpeg while YUV420 produces Rec709? Could you
document the rationale?

>  			size = { 1920, 1080 };
>  			bufferCount = 4;
>  			outCount++;
> @@ -571,6 +592,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  		case StreamRole::Viewfinder:
>  			fmts = data->isp_[Isp::Output0].dev()->formats();
>  			pixelFormat = formats::ARGB8888;
> +			colorSpace = ColorSpace::Jpeg;
>  			size = { 800, 600 };
>  			bufferCount = 4;
>  			outCount++;
> @@ -602,6 +624,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  		StreamConfiguration cfg(formats);
>  		cfg.size = size;
>  		cfg.pixelFormat = pixelFormat;
> +		cfg.colorSpace = colorSpace;
>  		cfg.bufferCount = bufferCount;
>  		config->addConfiguration(cfg);
>  	}
> @@ -709,6 +732,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>  		V4L2PixelFormat fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
>  		format.size = cfg.size;
>  		format.fourcc = fourcc;
> +		format.colorSpace = cfg.colorSpace;
>  
>  		LOG(RPI, Debug) << "Setting " << stream->name() << " to "
>  				<< format.toString();
> @@ -724,6 +748,23 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>  			return -EINVAL;
>  		}
>  
> +		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
> +			/*
> +			 * We should have been through validate() before so this
> +			 * shouldn't be possible, but we mustn't sweep color space
> +			 * problems under the carpet.
> +			 */

There's a guarantee that the configuration has been validated, so this
really can't happen.
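
If that guarantee is relied upon, the check could presumably shrink to an
assertion along these lines (a sketch, not part of the patch):

	ASSERT(format.colorSpace && cfg.colorSpace == format.colorSpace);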

> +			LOG(RPI, Warning)
> +				<< "Unexpected color space ("
> +				<< ColorSpace::toString(cfg.colorSpace) << " to "
> +				<< ColorSpace::toString(format.colorSpace) << ") in stream "
> +				<< stream->name();
> +			cfg.colorSpace = format.colorSpace;
> +		}
> +		LOG(RPI, Debug)
> +			<< "Stream " << stream->name() << " has color space "
> +			<< ColorSpace::toString(cfg.colorSpace);
> +
>  		cfg.setStream(stream);
>  		stream->setExternal(true);
>  
> @@ -748,6 +789,8 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>  		format = {};
>  		format.size = maxSize;
>  		format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420);
> +		/* No one asked for output, so the color space doesn't matter. */
> +		format.colorSpace = ColorSpace::Jpeg;
>  		ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
>  		if (ret) {
>  			LOG(RPI, Error)

Patch

diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 321b72ad..eb4fbf90 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -97,6 +97,7 @@  V4L2DeviceFormat toV4L2DeviceFormat(const V4L2SubdeviceFormat &format,
 
 	deviceFormat.fourcc = V4L2PixelFormat::fromPixelFormat(pix);
 	deviceFormat.size = format.size;
+	deviceFormat.colorSpace = format.colorSpace;
 	return deviceFormat;
 }
 
@@ -133,6 +134,7 @@  V4L2SubdeviceFormat findBestFormat(const SensorFormats &formatsMap, const Size &
 {
 	double bestScore = std::numeric_limits<double>::max(), score;
 	V4L2SubdeviceFormat bestFormat;
+	bestFormat.colorSpace = ColorSpace::Raw;
 
 	constexpr float penaltyAr = 1500.0;
 	constexpr float penaltyBitDepth = 500.0;
@@ -330,6 +332,8 @@  CameraConfiguration::Status RPiCameraConfiguration::validate()
 	if (config_.empty())
 		return Invalid;
 
+	status = validateColorSpaces(true);
+
 	/*
 	 * What if the platform has a non-90 degree rotation? We can't even
 	 * "adjust" the configuration and carry on. Alternatively, raising an
@@ -497,11 +501,23 @@  CameraConfiguration::Status RPiCameraConfiguration::validate()
 		V4L2DeviceFormat format;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
+		format.colorSpace = cfg.colorSpace;
+		LOG(RPI, Debug)
+			<< "Try color space " << ColorSpace::toString(cfg.colorSpace);
 
 		int ret = dev->tryFormat(&format);
 		if (ret)
 			return Invalid;
 
+		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
+			status = Adjusted;
+			LOG(RPI, Warning)
+				<< "Color space changed from "
+				<< ColorSpace::toString(cfg.colorSpace) << " to "
+				<< ColorSpace::toString(format.colorSpace);
+		}
+		cfg.colorSpace = format.colorSpace;
+
 		cfg.stride = format.planes[0].bpl;
 		cfg.frameSize = format.planes[0].size;
 
@@ -525,6 +541,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 	PixelFormat pixelFormat;
 	V4L2VideoDevice::Formats fmts;
 	Size size;
+	std::optional<ColorSpace> colorSpace;
 
 	if (roles.empty())
 		return config;
@@ -539,6 +556,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			pixelFormat = mbusCodeToPixelFormat(sensorFormat.mbus_code,
 							    BayerFormat::Packing::CSI2);
 			ASSERT(pixelFormat.isValid());
+			colorSpace = ColorSpace::Raw;
 			bufferCount = 2;
 			rawCount++;
 			break;
@@ -546,6 +564,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::StillCapture:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::NV12;
+			colorSpace = ColorSpace::Jpeg;
 			/* Return the largest sensor resolution. */
 			size = data->sensor_->resolution();
 			bufferCount = 1;
@@ -563,6 +582,8 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			 */
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::YUV420;
+			/* This will be reasonable for many applications. */
+			colorSpace = ColorSpace::Rec709;
 			size = { 1920, 1080 };
 			bufferCount = 4;
 			outCount++;
@@ -571,6 +592,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::Viewfinder:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::ARGB8888;
+			colorSpace = ColorSpace::Jpeg;
 			size = { 800, 600 };
 			bufferCount = 4;
 			outCount++;
@@ -602,6 +624,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		StreamConfiguration cfg(formats);
 		cfg.size = size;
 		cfg.pixelFormat = pixelFormat;
+		cfg.colorSpace = colorSpace;
 		cfg.bufferCount = bufferCount;
 		config->addConfiguration(cfg);
 	}
@@ -709,6 +732,7 @@  int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		V4L2PixelFormat fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
 		format.fourcc = fourcc;
+		format.colorSpace = cfg.colorSpace;
 
 		LOG(RPI, Debug) << "Setting " << stream->name() << " to "
 				<< format.toString();
@@ -724,6 +748,23 @@  int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 			return -EINVAL;
 		}
 
+		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
+			/*
+			 * We should have been through validate() before so this
+			 * shouldn't be possible, but we mustn't sweep color space
+			 * problems under the carpet.
+			 */
+			LOG(RPI, Warning)
+				<< "Unexpected color space ("
+				<< ColorSpace::toString(cfg.colorSpace) << " to "
+				<< ColorSpace::toString(format.colorSpace) << ") in stream "
+				<< stream->name();
+			cfg.colorSpace = format.colorSpace;
+		}
+		LOG(RPI, Debug)
+			<< "Stream " << stream->name() << " has color space "
+			<< ColorSpace::toString(cfg.colorSpace);
+
 		cfg.setStream(stream);
 		stream->setExternal(true);
 
@@ -748,6 +789,8 @@  int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		format = {};
 		format.size = maxSize;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420);
+		/* No one asked for output, so the color space doesn't matter. */
+		format.colorSpace = ColorSpace::Jpeg;
 		ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
 		if (ret) {
 			LOG(RPI, Error)