| Message ID | 20211020110825.12902-8-david.plowman@raspberrypi.com |
|---|---|
| State | Superseded |
Hi David,

Thank you for your patch.

On Wed, 20 Oct 2021 at 12:08, David Plowman <david.plowman@raspberrypi.com> wrote:
> The Raspberry Pi pipeline handler now sets color spaces correctly.
>
> In generateConfiguration() it sets them to reasonable default values
> based on the stream role.
>
> validate() now calls validateColorSpaces() to ensure that the
> requested color spaces are sensible, before proceeding to check what
> the hardware can deliver.
>
> Signed-off-by: David Plowman <david.plowman@raspberrypi.com>
> ---
>  .../pipeline/raspberrypi/raspberrypi.cpp | 42 +++++++++++++++++++
>  1 file changed, 42 insertions(+)
>
> diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> index 1634ca98..22db54ce 100644
> --- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> +++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> @@ -288,6 +288,8 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>  	if (config_.empty())
>  		return Invalid;
>
> +	status = validateColorSpaces(true);
> +
>  	/*
>  	 * What if the platform has a non-90 degree rotation? We can't even
>  	 * "adjust" the configuration and carry on. Alternatively, raising an
> @@ -354,6 +356,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>  		 */
>  		V4L2VideoDevice::Formats fmts = data_->unicam_[Unicam::Image].dev()->formats();
>  		V4L2DeviceFormat sensorFormat = findBestMode(fmts, cfg.size);
> +		sensorFormat.colorSpace = ColorSpace::Raw;

This is going to conflict badly with my media controller changes ;-)

Reviewed-by: Naushir Patuck <naush@raspberrypi.com>

>  		int ret = data_->unicam_[Unicam::Image].dev()->tryFormat(&sensorFormat);
>  		if (ret)
>  			return Invalid;

[snip]
Hi Naush

Thanks for the reply!

On Mon, 25 Oct 2021 at 09:42, Naushir Patuck <naush@raspberrypi.com> wrote:
>
> Hi David,
>
> Thank you for your patch.
>
> On Wed, 20 Oct 2021 at 12:08, David Plowman <david.plowman@raspberrypi.com> wrote:
>>
>> The Raspberry Pi pipeline handler now sets color spaces correctly.
>>
>> In generateConfiguration() it sets them to reasonable default values
>> based on the stream role.
>>
>> validate() now calls validateColorSpaces() to ensure that the
>> requested color spaces are sensible, before proceeding to check what
>> the hardware can deliver.
>>
>> Signed-off-by: David Plowman <david.plowman@raspberrypi.com>
>> ---
>>  .../pipeline/raspberrypi/raspberrypi.cpp | 42 +++++++++++++++++++
>>  1 file changed, 42 insertions(+)
>>
>> diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
>> index 1634ca98..22db54ce 100644
>> --- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
>> +++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
>> @@ -288,6 +288,8 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>>  	if (config_.empty())
>>  		return Invalid;
>>
>> +	status = validateColorSpaces(true);
>> +
>>  	/*
>>  	 * What if the platform has a non-90 degree rotation? We can't even
>>  	 * "adjust" the configuration and carry on. Alternatively, raising an
>> @@ -354,6 +356,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>>  		 */
>>  		V4L2VideoDevice::Formats fmts = data_->unicam_[Unicam::Image].dev()->formats();
>>  		V4L2DeviceFormat sensorFormat = findBestMode(fmts, cfg.size);
>> +		sensorFormat.colorSpace = ColorSpace::Raw;
>
>
> This is going to conflict badly with my media controller changes ;-)

*sigh* yes I know :( But on the plus side, there aren't that many
changes here, and some of it may actually get tidier (setting the
sensor colour space explicitly to "raw", for example, should just
disappear, right?).

Best regards
David

>
> Reviewed-by: Naushir Patuck <naush@raspberrypi.com>

[snip]
```diff
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 1634ca98..22db54ce 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -288,6 +288,8 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	if (config_.empty())
 		return Invalid;
 
+	status = validateColorSpaces(true);
+
 	/*
 	 * What if the platform has a non-90 degree rotation? We can't even
 	 * "adjust" the configuration and carry on. Alternatively, raising an
@@ -354,6 +356,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 		 */
 		V4L2VideoDevice::Formats fmts = data_->unicam_[Unicam::Image].dev()->formats();
 		V4L2DeviceFormat sensorFormat = findBestMode(fmts, cfg.size);
+		sensorFormat.colorSpace = ColorSpace::Raw;
 		int ret = data_->unicam_[Unicam::Image].dev()->tryFormat(&sensorFormat);
 		if (ret)
 			return Invalid;
@@ -449,10 +452,21 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 		V4L2DeviceFormat format;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
+		format.colorSpace = cfg.requestedColorSpace;
+		LOG(RPI, Debug)
+			<< "Try color space " << cfg.requestedColorSpace.toString();
 
 		int ret = dev->tryFormat(&format);
 		if (ret)
 			return Invalid;
+		cfg.actualColorSpace = format.colorSpace;
+		if (cfg.actualColorSpace != cfg.requestedColorSpace) {
+			status = Adjusted;
+			LOG(RPI, Warning)
+				<< "Color space changed from "
+				<< cfg.requestedColorSpace.toString() << " to "
+				<< cfg.actualColorSpace.toString();
+		}
 
 		cfg.stride = format.planes[0].bpl;
 		cfg.frameSize = format.planes[0].size;
@@ -477,6 +491,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 	PixelFormat pixelFormat;
 	V4L2VideoDevice::Formats fmts;
 	Size size;
+	ColorSpace colorSpace;
 
 	if (roles.empty())
 		return config;
@@ -491,6 +506,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			sensorFormat = findBestMode(fmts, size);
 			pixelFormat = sensorFormat.fourcc.toPixelFormat();
 			ASSERT(pixelFormat.isValid());
+			colorSpace = ColorSpace::Raw;
 			bufferCount = 2;
 			rawCount++;
 			break;
@@ -498,6 +514,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::StillCapture:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::NV12;
+			colorSpace = ColorSpace::Jpeg;
 			/* Return the largest sensor resolution. */
 			size = data->sensor_->resolution();
 			bufferCount = 1;
@@ -515,6 +532,8 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			 */
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::YUV420;
+			/* This will be reasonable for many applications. */
+			colorSpace = ColorSpace::Rec709;
 			size = { 1920, 1080 };
 			bufferCount = 4;
 			outCount++;
@@ -523,6 +542,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::Viewfinder:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::ARGB8888;
+			colorSpace = ColorSpace::Jpeg;
 			size = { 800, 600 };
 			bufferCount = 4;
 			outCount++;
@@ -554,6 +574,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		StreamConfiguration cfg(formats);
 		cfg.size = size;
 		cfg.pixelFormat = pixelFormat;
+		cfg.requestedColorSpace = colorSpace;
 		cfg.bufferCount = bufferCount;
 		config->addConfiguration(cfg);
 	}
@@ -601,6 +622,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 	/* First calculate the best sensor mode we can use based on the user request. */
 	V4L2VideoDevice::Formats fmts = data->unicam_[Unicam::Image].dev()->formats();
 	V4L2DeviceFormat sensorFormat = findBestMode(fmts, rawStream ? sensorSize : maxSize);
+	sensorFormat.colorSpace = ColorSpace::Raw;
 
 	/*
 	 * Unicam image output format. The ISP input format gets set at start,
@@ -650,6 +672,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		V4L2PixelFormat fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
 		format.fourcc = fourcc;
+		format.colorSpace = cfg.requestedColorSpace;
 
 		LOG(RPI, Debug) << "Setting " << stream->name() << " to "
 				<< format.toString();
@@ -665,6 +688,23 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 			return -EINVAL;
 		}
 
+		if (cfg.actualColorSpace != format.colorSpace) {
+			/*
+			 * We should have been through validate() before so this
+			 * shouldn't be possible, but we mustn't sweep color space
+			 * problems under the carpet.
+			 */
+			LOG(RPI, Warning)
+				<< "Unexpected color space change ("
+				<< cfg.actualColorSpace.toString() << " to "
+				<< format.colorSpace.toString() << ") in stream "
+				<< stream->name();
+			cfg.actualColorSpace = format.colorSpace;
+		}
+		LOG(RPI, Debug)
+			<< "Stream " << stream->name() << " has color space "
+			<< cfg.actualColorSpace.toString();
+
 		cfg.setStream(stream);
 		stream->setExternal(true);
 
@@ -689,6 +729,8 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		format = {};
 		format.size = maxSize;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420);
+		/* No one asked for output, so the color space doesn't matter. */
+		format.colorSpace = ColorSpace::Jpeg;
 		ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
 		if (ret) {
 			LOG(RPI, Error)
```
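The validate() changes above follow the usual V4L2 try-and-adjust pattern: propose a color space along with the format, let the driver substitute what it can actually produce, and downgrade the configuration status to Adjusted when the two differ. The toy sketch below only illustrates that control flow; it is plain standalone C++, not libcamera code, and driverTryColorSpace() is a made-up stand-in for V4L2VideoDevice::tryFormat().

```cpp
#include <iostream>
#include <string>

enum class Status { Valid, Adjusted, Invalid };

/*
 * Stand-in for the driver's TRY_FMT handling: it is free to replace the
 * proposed color space with the closest one it supports. Here we pretend
 * the hardware can only produce JPEG (full-range sRGB) output, except for
 * raw streams.
 */
static std::string driverTryColorSpace(const std::string &requested)
{
	return requested == "RAW" ? requested : "JPEG";
}

/*
 * Mirrors the pattern in validate(): record what actually came back and
 * report Adjusted when it differs from the request.
 */
static Status negotiateColorSpace(const std::string &requested, std::string &actual)
{
	actual = driverTryColorSpace(requested);
	if (actual != requested) {
		std::cerr << "Color space changed from " << requested
			  << " to " << actual << std::endl;
		return Status::Adjusted;
	}
	return Status::Valid;
}

int main()
{
	std::string actual;
	Status status = negotiateColorSpace("Rec709", actual);
	std::cout << (status == Status::Adjusted ? "Adjusted" : "Valid")
		  << ": streaming in " << actual << std::endl;
	return 0;
}
```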
The Raspberry Pi pipeline handler now sets color spaces correctly.

In generateConfiguration() it sets them to reasonable default values
based on the stream role.

validate() now calls validateColorSpaces() to ensure that the
requested color spaces are sensible, before proceeding to check what
the hardware can deliver.

Signed-off-by: David Plowman <david.plowman@raspberrypi.com>
---
 .../pipeline/raspberrypi/raspberrypi.cpp | 42 +++++++++++++++++++
 1 file changed, 42 insertions(+)
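To make the description concrete, here is a minimal application-side sketch of how the defaults and the validation step are intended to be used. The requestedColorSpace/actualColorSpace stream fields, the ColorSpace presets and the color_space.h header are taken from this series; the helper function itself, its name and the surrounding setup (how the configuration is generated and later applied with Camera::configure()) are hypothetical and only illustrate the flow, not a fixed API.

```cpp
#include <iostream>

#include <libcamera/camera.h>
#include <libcamera/color_space.h> /* added earlier in this series */
#include <libcamera/stream.h>

using namespace libcamera;

/*
 * Hypothetical helper: take a configuration produced by
 * Camera::generateConfiguration({ StreamRole::Viewfinder }), override the
 * default color space (Jpeg for a viewfinder stream on this pipeline) and
 * check whether the hardware can honour the request.
 */
bool applyColorSpace(CameraConfiguration *config, const ColorSpace &colorSpace)
{
	StreamConfiguration &cfg = config->at(0);

	cfg.requestedColorSpace = colorSpace;

	CameraConfiguration::Status status = config->validate();
	if (status == CameraConfiguration::Invalid)
		return false;

	/* validate() reports Adjusted when the request could not be met. */
	if (cfg.actualColorSpace != cfg.requestedColorSpace)
		std::cerr << "Using " << cfg.actualColorSpace.toString()
			  << " instead of " << cfg.requestedColorSpace.toString()
			  << std::endl;

	return true;
}
```

Camera::configure() would then be called with the validated configuration, at which point the pipeline handler logs the color space each stream ended up with.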