[libcamera-devel,v7,7/7] libcamera: pipeline: raspberrypi: Support color spaces

Message ID 20211126104045.4756-8-david.plowman@raspberrypi.com
State Superseded
Series
  • Colour spaces

Commit Message

David Plowman Nov. 26, 2021, 10:40 a.m. UTC
The Raspberry Pi pipeline handler now sets color spaces correctly.

In generateConfiguration() it sets them to reasonable default values
based on the stream role.

validate() now calls validateColorSpaces() to ensure that the
requested color spaces are sensible, before proceeding to check what
the hardware can deliver.

Signed-off-by: David Plowman <david.plowman@raspberrypi.com>
---
 .../pipeline/raspberrypi/raspberrypi.cpp      | 42 +++++++++++++++++++
 1 file changed, 42 insertions(+)
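
For readers less familiar with the colour space API this series adds, the following is a minimal application-side sketch (not part of the patch) of how the defaults from generateConfiguration() and the validate() adjustment are intended to be used. It assumes a Camera has already been acquired from a CameraManager; the function name is purely illustrative, and only ColorSpace values and helpers that the patch itself uses appear here.

#include <cerrno>
#include <iostream>
#include <memory>

#include <libcamera/libcamera.h>

using namespace libcamera;

/* Illustrative only: configure one video stream and report its colour space. */
int configureVideoStream(std::shared_ptr<Camera> camera)
{
	/* The pipeline handler fills in a default colour space per stream role. */
	std::unique_ptr<CameraConfiguration> config =
		camera->generateConfiguration({ StreamRole::VideoRecording });
	StreamConfiguration &cfg = config->at(0);

	/* Request Rec709 explicitly (also the generated default for this role). */
	cfg.colorSpace = ColorSpace::Rec709;

	/* validate() may adjust the colour space to what the ISP can deliver. */
	if (config->validate() == CameraConfiguration::Invalid)
		return -EINVAL;

	std::cout << "Using colour space "
		  << ColorSpace::toString(cfg.colorSpace) << std::endl;

	return camera->configure(config.get());
}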

Comments

Jacopo Mondi Nov. 30, 2021, 8:46 p.m. UTC | #1
On Fri, Nov 26, 2021 at 10:40:45AM +0000, David Plowman wrote:
> The Raspberry Pi pipeline handler now sets color spaces correctly.
>
> In generateConfiguration() it sets them to reasonable default values
> based on the stream role.
>
> validate() now calls validateColorSpaces() to ensure that the
> requested color spaces are sensible, before proceeding to check what
> the hardware can deliver.
>
> Signed-off-by: David Plowman <david.plowman@raspberrypi.com>
> ---
>  .../pipeline/raspberrypi/raspberrypi.cpp      | 42 +++++++++++++++++++
>  1 file changed, 42 insertions(+)
>
> diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> index ad526a8b..f0ec23cb 100644
> --- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> +++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> @@ -93,6 +93,7 @@ V4L2DeviceFormat toV4L2DeviceFormat(const V4L2SubdeviceFormat &format,
>
>  	deviceFormat.fourcc = V4L2PixelFormat::fromPixelFormat(pix);
>  	deviceFormat.size = format.size;
> +	deviceFormat.colorSpace = format.colorSpace;
>  	return deviceFormat;
>  }
>
> @@ -129,6 +130,7 @@ V4L2SubdeviceFormat findBestFormat(const SensorFormats &formatsMap, const Size &
>  {
>  	double bestScore = std::numeric_limits<double>::max(), score;
>  	V4L2SubdeviceFormat bestFormat;
> +	bestFormat.colorSpace = ColorSpace::Raw;
>
>  #define PENALTY_AR		1500.0
>  #define PENALTY_8BIT		2000.0
> @@ -335,6 +337,8 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>  	if (config_.empty())
>  		return Invalid;
>
> +	status = validateColorSpaces(true);
> +
>  	/*
>  	 * What if the platform has a non-90 degree rotation? We can't even
>  	 * "adjust" the configuration and carry on. Alternatively, raising an
> @@ -497,10 +501,21 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>  		V4L2DeviceFormat format;
>  		format.fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
>  		format.size = cfg.size;
> +		format.colorSpace = cfg.colorSpace;
> +		LOG(RPI, Debug)
> +			<< "Try color space " << ColorSpace::toString(cfg.colorSpace);

Is this required? There are no printouts for the other
V4L2DeviceFormat fields.

>
>  		int ret = dev->tryFormat(&format);
>  		if (ret)
>  			return Invalid;

Blank line?

> +		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
> +			status = Adjusted;
> +			LOG(RPI, Warning)
> +				<< "Color space changed from "
> +				<< ColorSpace::toString(cfg.colorSpace) << " to "
> +				<< ColorSpace::toString(format.colorSpace);
> +		}
> +		cfg.colorSpace = format.colorSpace;

Do you need to validateColorSpaces() again at the end?
>
>  		cfg.stride = format.planes[0].bpl;
>  		cfg.frameSize = format.planes[0].size;
> @@ -525,6 +540,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  	PixelFormat pixelFormat;
>  	V4L2VideoDevice::Formats fmts;
>  	Size size;
> +	std::optional<ColorSpace> colorSpace;
>
>  	if (roles.empty())
>  		return config;
> @@ -539,6 +555,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  			pixelFormat = mbusCodeToPixelFormat(sensorFormat.mbus_code,
>  							    BayerFormat::Packing::CSI2);
>  			ASSERT(pixelFormat.isValid());
> +			colorSpace = ColorSpace::Raw;
>  			bufferCount = 2;
>  			rawCount++;
>  			break;
> @@ -546,6 +563,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  		case StreamRole::StillCapture:
>  			fmts = data->isp_[Isp::Output0].dev()->formats();
>  			pixelFormat = formats::NV12;
> +			colorSpace = ColorSpace::Jpeg;
>  			/* Return the largest sensor resolution. */
>  			size = data->sensor_->resolution();
>  			bufferCount = 1;
> @@ -563,6 +581,8 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  			 */
>  			fmts = data->isp_[Isp::Output0].dev()->formats();
>  			pixelFormat = formats::YUV420;
> +			/* This will be reasonable for many applications. */
> +			colorSpace = ColorSpace::Rec709;
>  			size = { 1920, 1080 };
>  			bufferCount = 4;
>  			outCount++;
> @@ -571,6 +591,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  		case StreamRole::Viewfinder:
>  			fmts = data->isp_[Isp::Output0].dev()->formats();
>  			pixelFormat = formats::ARGB8888;
> +			colorSpace = ColorSpace::Jpeg;
>  			size = { 800, 600 };
>  			bufferCount = 4;
>  			outCount++;
> @@ -602,6 +623,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>  		StreamConfiguration cfg(formats);
>  		cfg.size = size;
>  		cfg.pixelFormat = pixelFormat;
> +		cfg.colorSpace = colorSpace;
>  		cfg.bufferCount = bufferCount;
>  		config->addConfiguration(cfg);
>  	}
> @@ -706,6 +728,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>  		V4L2PixelFormat fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
>  		format.size = cfg.size;
>  		format.fourcc = fourcc;
> +		format.colorSpace = cfg.colorSpace;
>
>  		LOG(RPI, Debug) << "Setting " << stream->name() << " to "
>  				<< format.toString();
> @@ -721,6 +744,23 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>  			return -EINVAL;
>  		}
>
> +		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
> +			/*
> +			 * We should have been through validate() before so this
> +			 * shouldn't be possible, but we mustn't sweep color space
> +			 * problems under the carpet.
> +			 */

How can this happen then?

> +			LOG(RPI, Warning)
> +				<< "Unexpected color space ("
> +				<< ColorSpace::toString(cfg.colorSpace) << " to "
> +				<< ColorSpace::toString(format.colorSpace) << ") in stream "
> +				<< stream->name();
> +			cfg.colorSpace = format.colorSpace;
> +		}
> +		LOG(RPI, Debug)
> +			<< "Stream " << stream->name() << " has color space "
> +			<< ColorSpace::toString(cfg.colorSpace);
> +

Really a lot of debug around color spaces :)

>  		cfg.setStream(stream);
>  		stream->setExternal(true);
>
> @@ -745,6 +785,8 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>  		format = {};
>  		format.size = maxSize;
>  		format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420);
> +		/* No one asked for output, so the color space doesn't matter. */
> +		format.colorSpace = ColorSpace::Jpeg;
>  		ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
>  		if (ret) {
>  			LOG(RPI, Error)
> --
> 2.30.2
>
David Plowman Dec. 1, 2021, 3:05 p.m. UTC | #2
Hi Jacopo

Thanks for the review!

On Tue, 30 Nov 2021 at 20:45, Jacopo Mondi <jacopo@jmondi.org> wrote:
>
> On Fri, Nov 26, 2021 at 10:40:45AM +0000, David Plowman wrote:
>
> [snip]
>
> > +             LOG(RPI, Debug)
> > +                     << "Stream " << stream->name() << " has color space "
> > +                     << ColorSpace::toString(cfg.colorSpace);
> > +
>
> Really a lot of debug around color spaces :)

Yes, I agree that I'm really quite paranoid about it. Perhaps a bit
too much! But I do have at least some reasons. Firstly, when pixel
formats and buffer sizes go wrong you tend to notice straight away
(when everything crashes) - with colour spaces you don't, so that's
why I put in lots of debug and noisy warnings.

Secondly, our drivers have never really been tested with colour spaces
yet - so I don't particularly trust them. I definitely want to
double-check that they are behaving as I expect.

I'm not sure if they're terribly good reasons, but maybe it helps to
explain a bit!

Thanks
David
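
Since, as noted above, colour space problems do not make anything crash, one practical check (illustrative only, not part of the patch or of this thread) is to read back what Camera::configure() actually applied: with this patch the Raspberry Pi handler writes the negotiated colour space back into each StreamConfiguration, so it can simply be logged afterwards. The helper name below is hypothetical.

#include <iostream>

#include <libcamera/libcamera.h>

/*
 * Illustrative helper: call after a successful Camera::configure() to log the
 * colour space each stream actually ended up with.
 */
static void logNegotiatedColorSpaces(const libcamera::CameraConfiguration *config)
{
	for (const libcamera::StreamConfiguration &cfg : *config)
		std::cerr << cfg.toString() << ": colour space "
			  << libcamera::ColorSpace::toString(cfg.colorSpace)
			  << std::endl;
}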



Patch

diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index ad526a8b..f0ec23cb 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -93,6 +93,7 @@  V4L2DeviceFormat toV4L2DeviceFormat(const V4L2SubdeviceFormat &format,
 
 	deviceFormat.fourcc = V4L2PixelFormat::fromPixelFormat(pix);
 	deviceFormat.size = format.size;
+	deviceFormat.colorSpace = format.colorSpace;
 	return deviceFormat;
 }
 
@@ -129,6 +130,7 @@  V4L2SubdeviceFormat findBestFormat(const SensorFormats &formatsMap, const Size &
 {
 	double bestScore = std::numeric_limits<double>::max(), score;
 	V4L2SubdeviceFormat bestFormat;
+	bestFormat.colorSpace = ColorSpace::Raw;
 
 #define PENALTY_AR		1500.0
 #define PENALTY_8BIT		2000.0
@@ -335,6 +337,8 @@  CameraConfiguration::Status RPiCameraConfiguration::validate()
 	if (config_.empty())
 		return Invalid;
 
+	status = validateColorSpaces(true);
+
 	/*
 	 * What if the platform has a non-90 degree rotation? We can't even
 	 * "adjust" the configuration and carry on. Alternatively, raising an
@@ -497,10 +501,21 @@  CameraConfiguration::Status RPiCameraConfiguration::validate()
 		V4L2DeviceFormat format;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
+		format.colorSpace = cfg.colorSpace;
+		LOG(RPI, Debug)
+			<< "Try color space " << ColorSpace::toString(cfg.colorSpace);
 
 		int ret = dev->tryFormat(&format);
 		if (ret)
 			return Invalid;
+		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
+			status = Adjusted;
+			LOG(RPI, Warning)
+				<< "Color space changed from "
+				<< ColorSpace::toString(cfg.colorSpace) << " to "
+				<< ColorSpace::toString(format.colorSpace);
+		}
+		cfg.colorSpace = format.colorSpace;
 
 		cfg.stride = format.planes[0].bpl;
 		cfg.frameSize = format.planes[0].size;
@@ -525,6 +540,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 	PixelFormat pixelFormat;
 	V4L2VideoDevice::Formats fmts;
 	Size size;
+	std::optional<ColorSpace> colorSpace;
 
 	if (roles.empty())
 		return config;
@@ -539,6 +555,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			pixelFormat = mbusCodeToPixelFormat(sensorFormat.mbus_code,
 							    BayerFormat::Packing::CSI2);
 			ASSERT(pixelFormat.isValid());
+			colorSpace = ColorSpace::Raw;
 			bufferCount = 2;
 			rawCount++;
 			break;
@@ -546,6 +563,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::StillCapture:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::NV12;
+			colorSpace = ColorSpace::Jpeg;
 			/* Return the largest sensor resolution. */
 			size = data->sensor_->resolution();
 			bufferCount = 1;
@@ -563,6 +581,8 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			 */
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::YUV420;
+			/* This will be reasonable for many applications. */
+			colorSpace = ColorSpace::Rec709;
 			size = { 1920, 1080 };
 			bufferCount = 4;
 			outCount++;
@@ -571,6 +591,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::Viewfinder:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::ARGB8888;
+			colorSpace = ColorSpace::Jpeg;
 			size = { 800, 600 };
 			bufferCount = 4;
 			outCount++;
@@ -602,6 +623,7 @@  CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		StreamConfiguration cfg(formats);
 		cfg.size = size;
 		cfg.pixelFormat = pixelFormat;
+		cfg.colorSpace = colorSpace;
 		cfg.bufferCount = bufferCount;
 		config->addConfiguration(cfg);
 	}
@@ -706,6 +728,7 @@  int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		V4L2PixelFormat fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
 		format.fourcc = fourcc;
+		format.colorSpace = cfg.colorSpace;
 
 		LOG(RPI, Debug) << "Setting " << stream->name() << " to "
 				<< format.toString();
@@ -721,6 +744,23 @@  int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 			return -EINVAL;
 		}
 
+		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
+			/*
+			 * We should have been through validate() before so this
+			 * shouldn't be possible, but we mustn't sweep color space
+			 * problems under the carpet.
+			 */
+			LOG(RPI, Warning)
+				<< "Unexpected color space ("
+				<< ColorSpace::toString(cfg.colorSpace) << " to "
+				<< ColorSpace::toString(format.colorSpace) << ") in stream "
+				<< stream->name();
+			cfg.colorSpace = format.colorSpace;
+		}
+		LOG(RPI, Debug)
+			<< "Stream " << stream->name() << " has color space "
+			<< ColorSpace::toString(cfg.colorSpace);
+
 		cfg.setStream(stream);
 		stream->setExternal(true);
 
@@ -745,6 +785,8 @@  int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		format = {};
 		format.size = maxSize;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420);
+		/* No one asked for output, so the color space doesn't matter. */
+		format.colorSpace = ColorSpace::Jpeg;
 		ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
 		if (ret) {
 			LOG(RPI, Error)