Message ID | 20220905073956.7342-6-naush@raspberrypi.com |
---|---|
State | Superseded |
Hi Naush

Thanks for the patch.

On Mon, 5 Sept 2022 at 08:40, Naushir Patuck via libcamera-devel
<libcamera-devel@lists.libcamera.org> wrote:
>
> Cycle through an array of RPiController::Metadata objects, one per
> prepare()/process() invocation, when running the controller algorithms. This
> allows historical metadata objects to be retained, and subsequently passed into
> the controller algorithms on future frames.
>
> This change provides a route to fixing a problem with the AGC algorithm, where
> if a manual shutter/gain is requested, the algorithm does not currently retain
> any context of the total exposure that it has calculated. As a result, the
> wrong digital gain would be applied when the frame with the manual shutter/gain
> is processed by the ISP.
>
> Signed-off-by: Naushir Patuck <naush@raspberrypi.com>
> ---
>  src/ipa/raspberrypi/raspberrypi.cpp | 69 +++++++++++++++++------------
>  1 file changed, 40 insertions(+), 29 deletions(-)
>
> diff --git a/src/ipa/raspberrypi/raspberrypi.cpp b/src/ipa/raspberrypi/raspberrypi.cpp
> index 8d731435764e..63cccda901c1 100644
> --- a/src/ipa/raspberrypi/raspberrypi.cpp
> +++ b/src/ipa/raspberrypi/raspberrypi.cpp
> @@ -57,6 +57,9 @@ namespace libcamera {
>  using namespace std::literals::chrono_literals;
>  using utils::Duration;
>
> +/* Number of metadata objects available in the context list. */
> +constexpr unsigned int numMetadataContexts = 16;

Just curious as to what determines this number. Presumably it only has
to be bigger than the max delay we have? Can anything else affect it,
like when the IPAs skip frames at high framerates?

But anyway, it all looked good to me.

Reviewed-by: David Plowman <david.plowman@raspberrypi.com>
Tested-by: David Plowman <david.plowman@raspberrypi.com>

Thanks!
David
Hi David,

Thanks for the review of this series!

On Fri, 23 Sept 2022 at 11:38, David Plowman <david.plowman@raspberrypi.com> wrote:
> Hi Naush
>
> Thanks for the patch.
>
> > +/* Number of metadata objects available in the context list. */
> > +constexpr unsigned int numMetadataContexts = 16;
>
> Just curious as to what determines this number. Presumably it only has
> to be bigger than the max delay we have? Can anything else affect it,
> like when the IPAs skip frames at high framerates?

It's actually the max delay + the number of dropped frames at startup.
I chose 16 because it matches the DelayedControl ring buffer size... I think!

I will post a v3 with your other amendments + address Umang's comments
early next week.

Regards,
Naush

> But anyway, it all looked good to me.
>
> Reviewed-by: David Plowman <david.plowman@raspberrypi.com>
> Tested-by: David Plowman <david.plowman@raspberrypi.com>
>
> Thanks!
> David
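As a rough illustration of the sizing rationale and wrap-around indexing discussed
above, here is a minimal standalone sketch. The Metadata stand-in and the helper
functions are illustrative assumptions, not the real RPiController or
DelayedControls API; only the value 16 comes from the patch itself.

    #include <array>
    #include <map>
    #include <string>

    /* Stand-in for RPiController::Metadata: a simple keyed store. */
    using Metadata = std::map<std::string, double>;

    /*
     * Assumed sizing: the list only needs to cover the sensor pipeline depth
     * plus the frames dropped at startup; 16 mirrors the value in the patch.
     */
    constexpr unsigned int numMetadataContexts = 16;

    std::array<Metadata, numMetadataContexts> contexts;
    unsigned int index = 0;

    /* Advance to the next context once a frame's statistics have been consumed. */
    void advance()
    {
            index = (index + 1) % contexts.size();
    }

    /* Fetch the context written for the previous frame, wrapping at zero. */
    Metadata &previous()
    {
            return contexts[index == 0 ? contexts.size() - 1 : index - 1];
    }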
diff --git a/src/ipa/raspberrypi/raspberrypi.cpp b/src/ipa/raspberrypi/raspberrypi.cpp
index 8d731435764e..63cccda901c1 100644
--- a/src/ipa/raspberrypi/raspberrypi.cpp
+++ b/src/ipa/raspberrypi/raspberrypi.cpp
@@ -57,6 +57,9 @@ namespace libcamera {
 using namespace std::literals::chrono_literals;
 using utils::Duration;

+/* Number of metadata objects available in the context list. */
+constexpr unsigned int numMetadataContexts = 16;
+
 /* Configure the sensor with these values initially. */
 constexpr double defaultAnalogueGain = 1.0;
 constexpr Duration defaultExposureTime = 20.0ms;
@@ -163,7 +166,8 @@ private:
         /* Raspberry Pi controller specific defines. */
         std::unique_ptr<RPiController::CamHelper> helper_;
         RPiController::Controller controller_;
-        RPiController::Metadata rpiMetadata_;
+        std::array<RPiController::Metadata, numMetadataContexts> rpiMetadata_;
+        unsigned int metadataIdx_;

         /*
          * We count frames to decide if the frame must be hidden (e.g. from
@@ -319,6 +323,7 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)

         firstStart_ = false;
         lastRunTimestamp_ = 0;
+        metadataIdx_ = 0;
 }

 void IPARPi::setMode(const IPACameraSensorInfo &sensorInfo)
@@ -513,6 +518,7 @@ void IPARPi::signalStatReady(uint32_t bufferId)
         reportMetadata();

         statsMetadataComplete.emit(bufferId & MaskID, libcameraMetadata_);
+        metadataIdx_ = (metadataIdx_ + 1) % rpiMetadata_.size();
 }

 void IPARPi::signalQueueRequest(const ControlList &controls)
@@ -536,14 +542,15 @@ void IPARPi::signalIspPrepare(const ISPConfig &data)

 void IPARPi::reportMetadata()
 {
-        std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
+        RPiController::Metadata &rpiMetadata = rpiMetadata_[metadataIdx_];
+        std::unique_lock<RPiController::Metadata> lock(rpiMetadata);

         /*
          * Certain information about the current frame and how it will be
          * processed can be extracted and placed into the libcamera metadata
          * buffer, where an application could query it.
          */
-        DeviceStatus *deviceStatus = rpiMetadata_.getLocked<DeviceStatus>("device.status");
+        DeviceStatus *deviceStatus = rpiMetadata.getLocked<DeviceStatus>("device.status");
         if (deviceStatus) {
                 libcameraMetadata_.set(controls::ExposureTime,
                                        deviceStatus->shutterSpeed.get<std::micro>());
@@ -554,17 +561,17 @@ void IPARPi::reportMetadata()
                 libcameraMetadata_.set(controls::SensorTemperature, *deviceStatus->sensorTemperature);
         }

-        AgcStatus *agcStatus = rpiMetadata_.getLocked<AgcStatus>("agc.status");
+        AgcStatus *agcStatus = rpiMetadata.getLocked<AgcStatus>("agc.status");
         if (agcStatus) {
                 libcameraMetadata_.set(controls::AeLocked, agcStatus->locked);
                 libcameraMetadata_.set(controls::DigitalGain, agcStatus->digitalGain);
         }

-        LuxStatus *luxStatus = rpiMetadata_.getLocked<LuxStatus>("lux.status");
+        LuxStatus *luxStatus = rpiMetadata.getLocked<LuxStatus>("lux.status");
         if (luxStatus)
                 libcameraMetadata_.set(controls::Lux, luxStatus->lux);

-        AwbStatus *awbStatus = rpiMetadata_.getLocked<AwbStatus>("awb.status");
+        AwbStatus *awbStatus = rpiMetadata.getLocked<AwbStatus>("awb.status");
         if (awbStatus) {
                 libcameraMetadata_.set(controls::ColourGains,
                                        Span<const float, 2>({ static_cast<float>(awbStatus->gainR),
@@ -572,7 +579,7 @@ void IPARPi::reportMetadata()
                 libcameraMetadata_.set(controls::ColourTemperature, awbStatus->temperatureK);
         }

-        BlackLevelStatus *blackLevelStatus = rpiMetadata_.getLocked<BlackLevelStatus>("black_level.status");
+        BlackLevelStatus *blackLevelStatus = rpiMetadata.getLocked<BlackLevelStatus>("black_level.status");
         if (blackLevelStatus)
                 libcameraMetadata_.set(controls::SensorBlackLevels,
                                        Span<const int32_t, 4>({ static_cast<int32_t>(blackLevelStatus->blackLevelR),
@@ -580,7 +587,7 @@ void IPARPi::reportMetadata()
                                                                 static_cast<int32_t>(blackLevelStatus->blackLevelG),
                                                                 static_cast<int32_t>(blackLevelStatus->blackLevelB) }));

-        FocusStatus *focusStatus = rpiMetadata_.getLocked<FocusStatus>("focus.status");
+        FocusStatus *focusStatus = rpiMetadata.getLocked<FocusStatus>("focus.status");
         if (focusStatus && focusStatus->num == 12) {
                 /*
                  * We get a 4x3 grid of regions by default. Calculate the average
@@ -591,7 +598,7 @@ void IPARPi::reportMetadata()
                 libcameraMetadata_.set(controls::FocusFoM, focusFoM);
         }

-        CcmStatus *ccmStatus = rpiMetadata_.getLocked<CcmStatus>("ccm.status");
+        CcmStatus *ccmStatus = rpiMetadata.getLocked<CcmStatus>("ccm.status");
         if (ccmStatus) {
                 float m[9];
                 for (unsigned int i = 0; i < 9; i++)
@@ -1002,10 +1009,10 @@ void IPARPi::returnEmbeddedBuffer(unsigned int bufferId)
 void IPARPi::prepareISP(const ISPConfig &data)
 {
         int64_t frameTimestamp = data.controls.get(controls::SensorTimestamp).value_or(0);
-        RPiController::Metadata lastMetadata;
+        RPiController::Metadata &rpiMetadata = rpiMetadata_[metadataIdx_];
         Span<uint8_t> embeddedBuffer;

-        lastMetadata = std::move(rpiMetadata_);
+        rpiMetadata.clear();
         fillDeviceStatus(data.controls);

         if (data.embeddedBufferPresent) {
@@ -1022,7 +1029,7 @@ void IPARPi::prepareISP(const ISPConfig &data)
          * This may overwrite the DeviceStatus using values from the sensor
          * metadata, and may also do additional custom processing.
          */
-        helper_->prepare(embeddedBuffer, rpiMetadata_);
+        helper_->prepare(embeddedBuffer, rpiMetadata);

         /* Done with embedded data now, return to pipeline handler asap. */
         if (data.embeddedBufferPresent)
@@ -1038,7 +1045,9 @@ void IPARPi::prepareISP(const ISPConfig &data)
          * current frame, or any other bits of metadata that were added
          * in helper_->Prepare().
          */
-        rpiMetadata_.merge(lastMetadata);
+        RPiController::Metadata &lastMetadata =
+                rpiMetadata_[metadataIdx_ == 0 ? rpiMetadata_.size() - 1 : metadataIdx_ - 1];
+        rpiMetadata.mergeCopy(lastMetadata);
         processPending_ = false;
         return;
 }
@@ -1048,48 +1057,48 @@ void IPARPi::prepareISP(const ISPConfig &data)

         ControlList ctrls(ispCtrls_);

-        controller_.prepare(&rpiMetadata_);
+        controller_.prepare(&rpiMetadata);

         /* Lock the metadata buffer to avoid constant locks/unlocks. */
-        std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
+        std::unique_lock<RPiController::Metadata> lock(rpiMetadata);

-        AwbStatus *awbStatus = rpiMetadata_.getLocked<AwbStatus>("awb.status");
+        AwbStatus *awbStatus = rpiMetadata.getLocked<AwbStatus>("awb.status");
         if (awbStatus)
                 applyAWB(awbStatus, ctrls);

-        CcmStatus *ccmStatus = rpiMetadata_.getLocked<CcmStatus>("ccm.status");
+        CcmStatus *ccmStatus = rpiMetadata.getLocked<CcmStatus>("ccm.status");
         if (ccmStatus)
                 applyCCM(ccmStatus, ctrls);

-        AgcStatus *dgStatus = rpiMetadata_.getLocked<AgcStatus>("agc.status");
+        AgcStatus *dgStatus = rpiMetadata.getLocked<AgcStatus>("agc.status");
         if (dgStatus)
                 applyDG(dgStatus, ctrls);

-        AlscStatus *lsStatus = rpiMetadata_.getLocked<AlscStatus>("alsc.status");
+        AlscStatus *lsStatus = rpiMetadata.getLocked<AlscStatus>("alsc.status");
         if (lsStatus)
                 applyLS(lsStatus, ctrls);

-        ContrastStatus *contrastStatus = rpiMetadata_.getLocked<ContrastStatus>("contrast.status");
+        ContrastStatus *contrastStatus = rpiMetadata.getLocked<ContrastStatus>("contrast.status");
         if (contrastStatus)
                 applyGamma(contrastStatus, ctrls);

-        BlackLevelStatus *blackLevelStatus = rpiMetadata_.getLocked<BlackLevelStatus>("black_level.status");
+        BlackLevelStatus *blackLevelStatus = rpiMetadata.getLocked<BlackLevelStatus>("black_level.status");
         if (blackLevelStatus)
                 applyBlackLevel(blackLevelStatus, ctrls);

-        GeqStatus *geqStatus = rpiMetadata_.getLocked<GeqStatus>("geq.status");
+        GeqStatus *geqStatus = rpiMetadata.getLocked<GeqStatus>("geq.status");
         if (geqStatus)
                 applyGEQ(geqStatus, ctrls);

-        DenoiseStatus *denoiseStatus = rpiMetadata_.getLocked<DenoiseStatus>("denoise.status");
+        DenoiseStatus *denoiseStatus = rpiMetadata.getLocked<DenoiseStatus>("denoise.status");
         if (denoiseStatus)
                 applyDenoise(denoiseStatus, ctrls);

-        SharpenStatus *sharpenStatus = rpiMetadata_.getLocked<SharpenStatus>("sharpen.status");
+        SharpenStatus *sharpenStatus = rpiMetadata.getLocked<SharpenStatus>("sharpen.status");
         if (sharpenStatus)
                 applySharpen(sharpenStatus, ctrls);

-        DpcStatus *dpcStatus = rpiMetadata_.getLocked<DpcStatus>("dpc.status");
+        DpcStatus *dpcStatus = rpiMetadata.getLocked<DpcStatus>("dpc.status");
         if (dpcStatus)
                 applyDPC(dpcStatus, ctrls);

@@ -1111,11 +1120,13 @@ void IPARPi::fillDeviceStatus(const ControlList &sensorControls)

         LOG(IPARPI, Debug) << "Metadata - " << deviceStatus;

-        rpiMetadata_.set("device.status", deviceStatus);
+        rpiMetadata_[metadataIdx_].set("device.status", deviceStatus);
 }

 void IPARPi::processStats(unsigned int bufferId)
 {
+        RPiController::Metadata &rpiMetadata = rpiMetadata_[metadataIdx_];
+
         auto it = buffers_.find(bufferId);
         if (it == buffers_.end()) {
                 LOG(IPARPI, Error) << "Could not find stats buffer!";
@@ -1125,11 +1136,11 @@ void IPARPi::processStats(unsigned int bufferId)
         Span<uint8_t> mem = it->second.planes()[0];
         bcm2835_isp_stats *stats = reinterpret_cast<bcm2835_isp_stats *>(mem.data());
         RPiController::StatisticsPtr statistics = std::make_shared<bcm2835_isp_stats>(*stats);
-        helper_->process(statistics, rpiMetadata_);
-        controller_.process(statistics, &rpiMetadata_);
+        helper_->process(statistics, rpiMetadata);
+        controller_.process(statistics, &rpiMetadata);

         struct AgcStatus agcStatus;
-        if (rpiMetadata_.get("agc.status", agcStatus) == 0) {
+        if (rpiMetadata.get("agc.status", agcStatus) == 0) {
                 ControlList ctrls(sensorCtrls_);
                 applyAGC(&agcStatus, ctrls);
Cycle through an array of RPiController::Metadata objects, one per
prepare()/process() invocation, when running the controller algorithms. This
allows historical metadata objects to be retained, and subsequently passed into
the controller algorithms on future frames.

This change provides a route to fixing a problem with the AGC algorithm, where
if a manual shutter/gain is requested, the algorithm does not currently retain
any context of the total exposure that it has calculated. As a result, the
wrong digital gain would be applied when the frame with the manual shutter/gain
is processed by the ISP.

Signed-off-by: Naushir Patuck <naush@raspberrypi.com>
---
 src/ipa/raspberrypi/raspberrypi.cpp | 69 +++++++++++++++++------------
 1 file changed, 40 insertions(+), 29 deletions(-)
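To make the "carry forward the previous frame's results when the algorithms are
skipped" behaviour described above concrete, here is a simplified sketch. It again
uses a plain std::map as a stand-in for RPiController::Metadata, and mergeMissing()
is a hypothetical helper approximating what Metadata::mergeCopy() is used for in
the patch, not the actual implementation.

    #include <array>
    #include <map>
    #include <string>

    using Metadata = std::map<std::string, double>;

    constexpr unsigned int numMetadataContexts = 16;
    std::array<Metadata, numMetadataContexts> contexts;
    unsigned int index = 0;

    /* Copy entries from prev that the current context does not already hold. */
    void mergeMissing(Metadata &current, const Metadata &prev)
    {
            current.insert(prev.begin(), prev.end());
    }

    void prepareFrame(bool algorithmsRun)
    {
            Metadata &current = contexts[index];
            current.clear();
            current["device.status"] = 1.0; /* placeholder for this frame's DeviceStatus */

            if (!algorithmsRun) {
                    /*
                     * The algorithms were skipped for this frame, so copy the
                     * previous frame's results into this context; keys already
                     * set for this frame (e.g. device.status) are preserved.
                     */
                    unsigned int prevIdx = index == 0 ? contexts.size() - 1 : index - 1;
                    mergeMissing(current, contexts[prevIdx]);
            }
    }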