Message ID | 20211210144424.14747-9-david.plowman@raspberrypi.com |
---|---|
State | Accepted |

Hi David,

On Fri, Dec 10, 2021 at 02:44:24PM +0000, David Plowman wrote:
> The Raspberry Pi pipeline handler now sets color spaces correctly.
>
> In generateConfiguration() it sets them to reasonable default values
> based on the stream role.
>
> validate() now calls validateColorSpaces() to ensure that the
> requested color spaces are sensible, before proceeding to check what
> the hardware can deliver.
>
> Signed-off-by: David Plowman <david.plowman@raspberrypi.com>
> Reviewed-by: Naushir Patuck <naush@raspberrypi.com>
> Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
> ---
>  .../pipeline/raspberrypi/raspberrypi.cpp | 40 +++++++++++++++++++
>  1 file changed, 40 insertions(+)
>
> [...]
>
> @@ -496,11 +500,25 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>  		V4L2DeviceFormat format;
>  		format.fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
>  		format.size = cfg.size;
> +		format.colorSpace = cfg.colorSpace;
> +
> +		LOG(RPI, Debug)
> +			<< "Try color space " << ColorSpace::toString(cfg.colorSpace);
>
>  		int ret = dev->tryFormat(&format);
>  		if (ret)
>  			return Invalid;
>
> +		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {

What if format.colorSpace was nullopt before validate ?
Isn't it enough to check for cfg.colorSpace != format.colorSpace ?

If this is otherwise intended:
Reviewed-by: Jacopo Mondi <jacopo@jmondi.org>

Thanks
   j

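A minimal sketch of the simplification being discussed here, i.e. the same check with the "!format.colorSpace ||" test dropped, assuming tryFormat() always reports a color space back (this is only an illustration of the suggestion, not the exact code that was merged):

	/*
	 * Compare the color space the driver actually gave us against the
	 * one requested, flag an adjustment if they differ, and propagate
	 * the driver's choice back into the stream configuration.
	 */
	if (cfg.colorSpace != format.colorSpace) {
		status = Adjusted;
		LOG(RPI, Debug)
			<< "Color space changed from "
			<< ColorSpace::toString(cfg.colorSpace) << " to "
			<< ColorSpace::toString(format.colorSpace);
	}

	cfg.colorSpace = format.colorSpace;
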
Hi Jacopo

Thanks for looking at this again.

On Fri, 10 Dec 2021 at 14:51, Jacopo Mondi <jacopo@jmondi.org> wrote:
>
> Hi David,
>
> On Fri, Dec 10, 2021 at 02:44:24PM +0000, David Plowman wrote:
> > The Raspberry Pi pipeline handler now sets color spaces correctly.
> >
> > [...]
> >
> > +		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
>
> What if format.colorSpace was nullopt before validate ?
> Isn't it enough to check for cfg.colorSpace != format.colorSpace ?
>
> If this is otherwise intended:
> Reviewed-by: Jacopo Mondi <jacopo@jmondi.org>
>
> Thanks
>    j

Yes, perhaps I've been fiddling very slightly with all this for just a
bit too long. I guess one might just drop the "format.colorSpace ||"
from the line above (as you suggest) and it would be strictly correct
(and fine).

Laurent, is that fixable-while-applying or would another version of
this specific patch be better?

Thanks
David

Hi David,

On Fri, Dec 10, 2021 at 03:08:03PM +0000, David Plowman wrote:
> On Fri, 10 Dec 2021 at 14:51, Jacopo Mondi wrote:
> > On Fri, Dec 10, 2021 at 02:44:24PM +0000, David Plowman wrote:
> > > The Raspberry Pi pipeline handler now sets color spaces correctly.
> > >
> > > [...]
> > >
> > > +		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
> >
> > What if format.colorSpace was nullopt before validate ?
> > Isn't it enough to check for cfg.colorSpace != format.colorSpace ?
> >
> > If this is otherwise intended:
> > Reviewed-by: Jacopo Mondi <jacopo@jmondi.org>
>
> Yes, perhaps I've been fiddling very slightly with all this for just a
> bit too long. I guess one might just drop the "format.colorSpace ||"
> from the line above (as you suggest) and it would be strictly correct
> (and fine).
>
> Laurent, is that fixable-while-applying or would another version of
> this specific patch be better?

Absolutely fixable while applying. The whole series has been fully
reviewed, so I'll queue it for merge.

diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 86851ac4..eb74d96c 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -96,6 +96,7 @@ V4L2DeviceFormat toV4L2DeviceFormat(const V4L2SubdeviceFormat &format,
 
 	deviceFormat.fourcc = V4L2PixelFormat::fromPixelFormat(pix);
 	deviceFormat.size = format.size;
+	deviceFormat.colorSpace = format.colorSpace;
 	return deviceFormat;
 }
 
@@ -132,6 +133,7 @@ V4L2SubdeviceFormat findBestFormat(const SensorFormats &formatsMap, const Size &
 {
 	double bestScore = std::numeric_limits<double>::max(), score;
 	V4L2SubdeviceFormat bestFormat;
+	bestFormat.colorSpace = ColorSpace::Raw;
 
 	constexpr float penaltyAr = 1500.0;
 	constexpr float penaltyBitDepth = 500.0;
@@ -329,6 +331,8 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	if (config_.empty())
 		return Invalid;
 
+	status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
+
 	/*
 	 * What if the platform has a non-90 degree rotation? We can't even
 	 * "adjust" the configuration and carry on. Alternatively, raising an
@@ -496,11 +500,25 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 		V4L2DeviceFormat format;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
+		format.colorSpace = cfg.colorSpace;
+
+		LOG(RPI, Debug)
+			<< "Try color space " << ColorSpace::toString(cfg.colorSpace);
 
 		int ret = dev->tryFormat(&format);
 		if (ret)
 			return Invalid;
 
+		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
+			status = Adjusted;
+			LOG(RPI, Debug)
+				<< "Color space changed from "
+				<< ColorSpace::toString(cfg.colorSpace) << " to "
+				<< ColorSpace::toString(format.colorSpace);
+		}
+
+		cfg.colorSpace = format.colorSpace;
+
 		cfg.stride = format.planes[0].bpl;
 		cfg.frameSize = format.planes[0].size;
 
@@ -524,6 +542,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 	PixelFormat pixelFormat;
 	V4L2VideoDevice::Formats fmts;
 	Size size;
+	std::optional<ColorSpace> colorSpace;
 
 	if (roles.empty())
 		return config;
@@ -539,6 +558,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			pixelFormat = mbusCodeToPixelFormat(sensorFormat.mbus_code,
 							    BayerFormat::Packing::CSI2);
 			ASSERT(pixelFormat.isValid());
+			colorSpace = ColorSpace::Raw;
 			bufferCount = 2;
 			rawCount++;
 			break;
@@ -546,6 +566,12 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::StillCapture:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::NV12;
+			/*
+			 * Still image codecs usually expect the JPEG color space.
+			 * Even RGB codecs will be fine as the RGB we get with the
+			 * JPEG color space is the same as sRGB.
+			 */
+			colorSpace = ColorSpace::Jpeg;
 			/* Return the largest sensor resolution. */
 			size = sensorSize;
 			bufferCount = 1;
@@ -563,6 +589,11 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			 */
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::YUV420;
+			/*
+			 * Choose a color space appropriate for video recording.
+			 * Rec.709 will be a good default for HD resolutions.
+			 */
+			colorSpace = ColorSpace::Rec709;
 			size = { 1920, 1080 };
 			bufferCount = 4;
 			outCount++;
@@ -571,6 +602,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::Viewfinder:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::ARGB8888;
+			colorSpace = ColorSpace::Jpeg;
 			size = { 800, 600 };
 			bufferCount = 4;
 			outCount++;
@@ -617,6 +649,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		StreamConfiguration cfg(formats);
 		cfg.size = size;
 		cfg.pixelFormat = pixelFormat;
+		cfg.colorSpace = colorSpace;
 		cfg.bufferCount = bufferCount;
 		config->addConfiguration(cfg);
 	}
@@ -724,6 +757,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		V4L2PixelFormat fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
 		format.fourcc = fourcc;
+		format.colorSpace = cfg.colorSpace;
 
 		LOG(RPI, Debug) << "Setting " << stream->name() << " to "
 				<< format.toString();
@@ -739,6 +773,10 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 			return -EINVAL;
 		}
 
+		LOG(RPI, Debug)
+			<< "Stream " << stream->name() << " has color space "
+			<< ColorSpace::toString(cfg.colorSpace);
+
 		cfg.setStream(stream);
 		stream->setExternal(true);
 
@@ -763,6 +801,8 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		format = {};
 		format.size = maxSize;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420);
+		/* No one asked for output, so the color space doesn't matter. */
+		format.colorSpace = ColorSpace::Jpeg;
 		ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
 		if (ret) {
 			LOG(RPI, Error)
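
As a rough illustration of how an application sees the defaults and the validate() adjustment added by this patch, here is a hedged sketch against the libcamera public API of this era; the camera acquisition boilerplate is omitted, and `camera` is assumed to be a Camera that has already been acquired from the CameraManager:

	#include <iostream>
	#include <memory>

	#include <libcamera/libcamera.h>

	using namespace libcamera;

	/* 'camera' is assumed to be an already-acquired camera. */
	void configureVideo(std::shared_ptr<Camera> camera)
	{
		/* The pipeline handler picks Rec.709 for VideoRecording by default. */
		std::unique_ptr<CameraConfiguration> config =
			camera->generateConfiguration({ StreamRole::VideoRecording });

		StreamConfiguration &cfg = config->at(0);

		/* Ask for a different color space; validate() may adjust it. */
		cfg.colorSpace = ColorSpace::Jpeg;

		if (config->validate() == CameraConfiguration::Adjusted)
			std::cout << "Color space adjusted to "
				  << ColorSpace::toString(cfg.colorSpace) << std::endl;

		camera->configure(config.get());
	}

If the hardware cannot produce the requested color space, tryFormat() reports what it can do instead, validate() returns Adjusted, and cfg.colorSpace carries the value the stream will actually use.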