Message ID | 20230921165550.50956-5-jacopo.mondi@ideasonboard.com
---|---
State | Accepted
Hi Jacopo,

Thank you for the patch.

On Thu, Sep 21, 2023 at 06:55:41PM +0200, Jacopo Mondi via libcamera-devel wrote:
> Handle the SensorConfiguration provided by the application in the
> pipeline validate() and configure() call chains.
>
> During validation, first make sure SensorConfiguration is valid, then
> handle it to compute the sensor format.
>
> For the VC4 platform where the RAW stream follows the sensor's
> configuration adjust the RAW stream configuration to match the sensor
> configuration.
>
> Reviewed-by: Naushir Patuck <naush@raspberrypi.com>
> Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
> Signed-off-by: Naushir Patuck <naush@raspberrypi.com>
> Reviewed-by: Jacopo Mondi <jacopo.mondi@ideasonboard.com>
> Signed-off-by: Jacopo Mondi <jacopo.mondi@ideasonboard.com>
> ---
>  .../pipeline/rpi/common/pipeline_base.cpp | 66 ++++++++++++++++---
>  .../pipeline/rpi/common/pipeline_base.h   |  4 +-
>  src/libcamera/pipeline/rpi/vc4/vc4.cpp    | 28 +++++++-
>  3 files changed, 84 insertions(+), 14 deletions(-)
>
> diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
> index 51fa1bbf9aa9..c02ceb65cae8 100644
> --- a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
> +++ b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
> @@ -180,6 +180,15 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>          if (config_.empty())
>                  return Invalid;
>
> +        /*
> +         * Make sure that if a sensor configuration has been requested it
> +         * is valid.
> +         */
> +        if (sensorConfig && !sensorConfig->valid()) {
> +                LOG(RPI, Error) << "Invalid sensor configuration request";
> +                return Invalid;
> +        }
> +
>          status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
>
>          /*
> @@ -207,19 +216,43 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>          std::sort(outStreams.begin(), outStreams.end(),
>                    [](auto &l, auto &r) { return l.cfg->size > r.cfg->size; });
>
> -        /* Compute the sensor configuration. */
> -        unsigned int bitDepth = defaultRawBitDepth;
> -        if (!rawStreams.empty()) {
> +        /* Compute the sensor's format then do any platform specific fixups. */
> +        unsigned int bitDepth;
> +        Size sensorSize;
> +
> +        if (sensorConfig) {
> +                /* Use the application provided sensor configuration. */
> +                bitDepth = sensorConfig->bitDepth;
> +                sensorSize = sensorConfig->outputSize;
> +        } else if (!rawStreams.empty()) {
> +                /* Use the RAW stream format and size. */
>                  BayerFormat bayerFormat = BayerFormat::fromPixelFormat(rawStreams[0].cfg->pixelFormat);
>                  bitDepth = bayerFormat.bitDepth;
> +                sensorSize = rawStreams[0].cfg->size;
> +        } else {
> +                bitDepth = defaultRawBitDepth;
> +                sensorSize = outStreams[0].cfg->size;
>          }
>
> -        sensorFormat_ = data_->findBestFormat(rawStreams.empty() ? outStreams[0].cfg->size
> -                                                                 : rawStreams[0].cfg->size,
> -                                              bitDepth);
> +        sensorFormat_ = data_->findBestFormat(sensorSize, bitDepth);
> +
> +        /*
> +         * If a sensor configuration has been requested, it should apply
> +         * without modifications.
> +         */
> +        if (sensorConfig) {
> +                BayerFormat bayer = BayerFormat::fromMbusCode(sensorFormat_.mbus_code);
> +
> +                if (bayer.bitDepth != sensorConfig->bitDepth ||
> +                    sensorFormat_.size != sensorConfig->outputSize) {
> +                        LOG(RPI, Error) << "Invalid sensor configuration: "
> +                                        << "bitDepth/size mismatch";
> +                        return Invalid;
> +                }
> +        }
>
>          /* Do any platform specific fixups. */
> -        status = data_->platformValidate(rawStreams, outStreams);
> +        status = data_->platformValidate(this, rawStreams, outStreams);
>          if (status == Invalid)
>                  return Invalid;
>
> @@ -467,12 +500,25 @@ int PipelineHandlerBase::configure(Camera *camera, CameraConfiguration *config)
>          std::sort(ispStreams.begin(), ispStreams.end(),
>                    [](auto &l, auto &r) { return l.cfg->size > r.cfg->size; });
>
> -        /* Apply the format on the sensor with any cached transform. */
> +        /*
> +         * Apply the format on the sensor with any cached transform.
> +         *
> +         * If the application has provided a sensor configuration apply it
> +         * instead of just applying a format.
> +         */
>          const RPiCameraConfiguration *rpiConfig =
>                  static_cast<const RPiCameraConfiguration *>(config);
> -        V4L2SubdeviceFormat sensorFormat = rpiConfig->sensorFormat_;
> +        V4L2SubdeviceFormat sensorFormat;
>
> -        ret = data->sensor_->setFormat(&sensorFormat, rpiConfig->combinedTransform_);
> +        if (rpiConfig->sensorConfig) {
> +                ret = data->sensor_->applyConfiguration(*rpiConfig->sensorConfig,
> +                                                        rpiConfig->combinedTransform_,
> +                                                        &sensorFormat);
> +        } else {
> +                sensorFormat = rpiConfig->sensorFormat_;
> +                ret = data->sensor_->setFormat(&sensorFormat,
> +                                               rpiConfig->combinedTransform_);
> +        }
>          if (ret)
>                  return ret;
>
> diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.h b/src/libcamera/pipeline/rpi/common/pipeline_base.h
> index dbabc61ea48c..81b2b7d2f4d1 100644
> --- a/src/libcamera/pipeline/rpi/common/pipeline_base.h
> +++ b/src/libcamera/pipeline/rpi/common/pipeline_base.h
> @@ -42,6 +42,7 @@ namespace RPi {
>  /* Map of mbus codes to supported sizes reported by the sensor. */
>  using SensorFormats = std::map<unsigned int, std::vector<Size>>;
>
> +class RPiCameraConfiguration;
>  class CameraData : public Camera::Private
>  {
>  public:
> @@ -72,7 +73,8 @@ public:
>                  V4L2VideoDevice *dev;
>          };
>
> -        virtual CameraConfiguration::Status platformValidate(std::vector<StreamParams> &rawStreams,
> +        virtual CameraConfiguration::Status platformValidate(RPiCameraConfiguration *rpiConfig,
> +                                                              std::vector<StreamParams> &rawStreams,
>                                                                std::vector<StreamParams> &outStreams) const = 0;
>          virtual int platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
>                                        std::optional<BayerFormat::Packing> packing,
> diff --git a/src/libcamera/pipeline/rpi/vc4/vc4.cpp b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
> index 018cf4881d0e..2670eb8c4bbc 100644
> --- a/src/libcamera/pipeline/rpi/vc4/vc4.cpp
> +++ b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
> @@ -65,7 +65,8 @@ public:
>          {
>          }
>
> -        CameraConfiguration::Status platformValidate(std::vector<StreamParams> &rawStreams,
> +        CameraConfiguration::Status platformValidate(RPi::RPiCameraConfiguration *rpiConfig,
> +                                                     std::vector<StreamParams> &rawStreams,
>                                                       std::vector<StreamParams> &outStreams) const override;
>
>          int platformPipelineConfigure(const std::unique_ptr<YamlObject> &root) override;
> @@ -394,7 +395,8 @@ int PipelineHandlerVc4::platformRegister(std::unique_ptr<RPi::CameraData> &camer
>          return 0;
>  }
>
> -CameraConfiguration::Status Vc4CameraData::platformValidate(std::vector<StreamParams> &rawStreams,
> +CameraConfiguration::Status Vc4CameraData::platformValidate(RPi::RPiCameraConfiguration *rpiConfig,
> +                                                             std::vector<StreamParams> &rawStreams,
>                                                               std::vector<StreamParams> &outStreams) const
>  {
>          CameraConfiguration::Status status = CameraConfiguration::Status::Valid;
>
> @@ -405,9 +407,27 @@ CameraConfiguration::Status Vc4CameraData::platformValidate(std::vector<StreamPa
>                  return CameraConfiguration::Status::Invalid;
>          }
>
> -        if (!rawStreams.empty())
> +        if (!rawStreams.empty()) {
>                  rawStreams[0].dev = unicam_[Unicam::Image].dev();
>
> +                /* Adjust the RAW stream to match the computed sensor format. */
> +                StreamConfiguration *rawStream = rawStreams[0].cfg;
> +                BayerFormat rawBayer = BayerFormat::fromMbusCode(rpiConfig->sensorFormat_.mbus_code);
> +
> +                /* Handle flips to make sure to match the RAW stream format. */
> +                if (flipsAlterBayerOrder_)
> +                        rawBayer = rawBayer.transform(rpiConfig->combinedTransform_);
> +                PixelFormat rawFormat = rawBayer.toPixelFormat();
> +
> +                if (rawStream->pixelFormat != rawFormat ||
> +                    rawStream->size != rpiConfig->sensorFormat_.size) {
> +                        rawStream->pixelFormat = rawFormat;
> +                        rawStream->size = rpiConfig->sensorFormat_.size;
> +
> +                        status = CameraConfiguration::Adjusted;
> +                }
> +        }
> +
>          /*
>           * For the two ISP outputs, one stream must be equal or smaller than the
>           * other in all dimensions.
> @@ -417,6 +437,8 @@ CameraConfiguration::Status Vc4CameraData::platformValidate(std::vector<StreamPa
>          for (unsigned int i = 0; i < outStreams.size(); i++) {
>                  Size size;
>
> +                /* \todo Warn if upscaling: reduces image quality. */
> +

Why is this a todo item ?

>                  size.width = std::min(outStreams[i].cfg->size.width,
>                                        outStreams[0].cfg->size.width);
>                  size.height = std::min(outStreams[i].cfg->size.height,
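For context on the `\todo` being questioned above: the loop that follows it clamps each additional ISP output to the largest requested output, so a request larger than what the sensor mode provides ends up upscaled by the ISP. Below is a minimal, hypothetical sketch of the kind of check that `\todo` seems to call for. It is not part of this patch; the helper name `requiresUpscaling()` and the choice of comparing against the computed sensor output size are assumptions made here purely for illustration.

```cpp
#include <iostream>

#include <libcamera/geometry.h>

using libcamera::Size;

/*
 * Hypothetical helper (not in the patch): true when a requested output is
 * larger than the reference size in either dimension, i.e. when the ISP
 * would have to upscale and image quality would degrade.
 */
static bool requiresUpscaling(const Size &requested, const Size &reference)
{
	return requested.width > reference.width ||
	       requested.height > reference.height;
}

int main()
{
	/* Example: a 1080p output from a 720p sensor mode needs upscaling. */
	std::cout << requiresUpscaling(Size(1920, 1080), Size(1280, 720))
		  << std::endl;
	return 0;
}
```

In platformValidate() such a check could be applied to each entry of outStreams (for instance against rpiConfig->sensorFormat_.size) and turned into a LOG(RPI, Warning) rather than being left as a `\todo`.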
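For completeness, here is a minimal application-side sketch of how the SensorConfiguration handled by this patch reaches the pipeline handler: the application fills CameraConfiguration::sensorConfig before calling validate() and configure(), and validate() now refuses, rather than adjusts, a request the sensor cannot satisfy exactly. This is an illustration only: camera acquisition and stream setup are omitted, the function name configureWithSensorMode() and the 1920x1080 / 10-bit values are made up for the example, and only the bitDepth and outputSize fields checked by the patch are shown.

```cpp
#include <cerrno>
#include <memory>

#include <libcamera/libcamera.h>

using namespace libcamera;

/* Request an exact sensor mode for an already-acquired camera. */
int configureWithSensorMode(std::shared_ptr<Camera> camera)
{
	std::unique_ptr<CameraConfiguration> config =
		camera->generateConfiguration({ StreamRole::Viewfinder });
	if (!config)
		return -EINVAL;

	/* Ask the sensor for a 10-bit, 1920x1080 output (arbitrary values). */
	SensorConfiguration sensorConfig;
	sensorConfig.bitDepth = 10;
	sensorConfig.outputSize = Size(1920, 1080);
	config->sensorConfig = sensorConfig;

	/*
	 * With this patch, validate() returns Invalid if the requested sensor
	 * configuration cannot be applied as-is, instead of adjusting it.
	 */
	if (config->validate() == CameraConfiguration::Invalid)
		return -EINVAL;

	return camera->configure(config.get());
}
```

On VC4 the RAW stream then follows the sensor: platformValidate() adjusts the RAW stream's pixel format and size to the computed sensor format, so an application that also requests a RAW stream should expect Adjusted rather than Valid from validate() when its RAW parameters do not already match the sensor configuration.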