Message ID | 20211028100349.1098545-3-hanlinchen@chromium.org |
---|---|
State | Superseded |
Headers | show |
Series |
|
Related | show |
Hi Han-Lin, Thank you for the patch. On 10/28/21 3:33 PM, Han-Lin Chen wrote: > Set the statistics with the latest AE AiqResults which has the same > exposure time and analog gain. The patch reduces the AE haunting during > the converging process. > > Signed-off-by: Han-Lin Chen <hanlinchen@chromium.com> > --- > aiq/aiq_input_parameters.cpp | 4 +-- > ipu3.cpp | 66 ++++++++++++++++++++++++++++++------ > 2 files changed, 57 insertions(+), 13 deletions(-) > > diff --git a/aiq/aiq_input_parameters.cpp b/aiq/aiq_input_parameters.cpp > index bc87b31..5dd2f6c 100644 > --- a/aiq/aiq_input_parameters.cpp > +++ b/aiq/aiq_input_parameters.cpp > @@ -130,7 +130,7 @@ AiqInputParameters &AiqInputParameters::operator=(const AiqInputParameters &othe > > void AiqInputParameters::setAeAwbAfDefaults() > { > - /*Ae Params */ > + /* Ae Params */ > aeInputParams.num_exposures = NUM_EXPOSURES; > aeInputParams.frame_use = ia_aiq_frame_use_preview; > aeInputParams.flash_mode = ia_aiq_flash_mode_off; > @@ -166,7 +166,7 @@ void AiqInputParameters::setAeAwbAfDefaults() > ia_aiq_af_range_normal, > ia_aiq_af_metering_mode_auto, > ia_aiq_flash_mode_off, > - NULL, NULL, false > + &focusRect, &manualFocusParams, false Should this be moved to focus patch one instead? I find this odd to be present in this patch, since it's about AE results? 
> }; > > /* GBCE Params */ > diff --git a/ipu3.cpp b/ipu3.cpp > index 8126e9d..b7de901 100644 > --- a/ipu3.cpp > +++ b/ipu3.cpp > @@ -59,7 +59,8 @@ private: > void fillParams(unsigned int frame, ipu3_uapi_params *params); > void parseStatistics(unsigned int frame, > int64_t frameTimestamp, > - const ipu3_uapi_stats_3a *stats); > + const ipu3_uapi_stats_3a *stats, > + const ControlList& sensorCtrls); > > void setControls(unsigned int frame); > > @@ -84,6 +85,7 @@ private: > /* Temporary storage until we have a FrameContext object / struct */ > aiq::AiqInputParameters aiqInputParams_; > aiq::AiqResults results_; > + aiq::AiqResultsRingBuffer resultsHistory_; > > BinaryData aiqb_; > BinaryData nvm_; > @@ -282,6 +284,8 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo, > /* Upate the camera controls using the new sensor settings. */ > updateControls(sensorInfo_, ctrls_, ipaControls); > > + resultsHistory_.Reset(); > + > return 0; > } > > @@ -327,7 +331,10 @@ void IPAIPU3::processEvent(const IPU3Event &event) > const ipu3_uapi_stats_3a *stats = > reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data()); > > - parseStatistics(event.frame, event.frameTimestamp, stats); > + parseStatistics(event.frame, > + event.frameTimestamp, > + stats, > + event.sensorControls); indentation > break; > } > case EventFillParams: { > @@ -374,14 +381,16 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) > */ > > /* Run algorithms into/using this context structure */ > - if (frame % 10 == 0) > - aiq_.run2a(frame, aiqInputParams_, results_); > + aiq_.run2a(frame, aiqInputParams_, results_); > > aic_.updateRuntimeParams(results_); > aic_.run(params); > > exposure_ = results_.ae()->exposures[0].sensor_exposure->coarse_integration_time; > gain_ = results_.ae()->exposures[0].sensor_exposure->analog_gain_code_global; > + > + resultsHistory_.Push(results_); Possibly usage of ringbuffer to highlight how it's used, what it provides (a high level documentation on it's 'need', 
the 'when' and 'why' of previous algo-runs results lookup) would be good to understand the context I believe. All this can be a separate patch. > + > setControls(frame); > > IPU3Action op; > @@ -392,7 +401,8 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) > > void IPAIPU3::parseStatistics(unsigned int frame, > int64_t frameTimestamp, > - const ipu3_uapi_stats_3a *stats) > + const ipu3_uapi_stats_3a *stats, > + const ControlList& sensorCtrls) > { > ControlList ctrls(controls::controls); > > @@ -403,10 +413,43 @@ void IPAIPU3::parseStatistics(unsigned int frame, > */ > > ASSERT (frameTimestamp > 0); > - aiq_.setStatistics(frame, frameTimestamp, results_, stats); > + > + /* Ae algorithm expects the statistics to be set with it's corresponding Ae > + * result, i.e., the Ae result should match the exposure time/analog gain > + * with the the effective sensor controls of the statistics. > + * Search the required Ae result in the result history and combine it with > + * the latest result as the input to AIQ::setStatistics(). > + */ Is there a bit more descriptive documentation I can refer to? Maybe in the ia_aiq documentation to understand 'when' and 'how' the aiq results are meant to be re-used? 
I think I wrote my last comment a bit hastily > + > + int32_t effectiveExpo = 0; > + int32_t effectiveGain = 0; > + ControlValue ctrlValue; > + > + ctrlValue = sensorCtrls.get(V4L2_CID_EXPOSURE); > + if (!ctrlValue.isNone()) > + effectiveExpo = ctrlValue.get<int32_t>(); > + > + ctrlValue = sensorCtrls.get(V4L2_CID_ANALOGUE_GAIN); > + if (!ctrlValue.isNone()) > + effectiveGain = ctrlValue.get<int32_t>(); > + > + auto pred = [effectiveExpo, effectiveGain] (aiq::AiqResults& result) { > + ia_aiq_exposure_sensor_parameters* sensorExposure = > + result.ae()->exposures[0].sensor_exposure; > + > + return (effectiveExpo == sensorExposure->coarse_integration_time || > + effectiveGain == sensorExposure->analog_gain_code_global); > + }; > + aiq::AiqResults& aeMatchedResults = resultsHistory_.searchBackward(pred, results_); > + aiq::AiqResults combinedResults = results_; > + combinedResults.setAe(aeMatchedResults.ae()); > + > + /* Aiq library expects timestamp in microseconds */ > + aiq_.setStatistics(frame, (frameTimestamp / 1000), combinedResults, stats); > > /* Set frame durations from exposure results */ > - ia_aiq_exposure_sensor_parameters *sensorExposure = results_.ae()->exposures->sensor_exposure; > + ia_aiq_exposure_sensor_parameters *sensorExposure = combinedResults.ae()->exposures->sensor_exposure; > int64_t frameDuration = (sensorExposure->line_length_pixels * sensorExposure->frame_length_lines) / > (sensorInfo_.pixelRate / 1e6); > ctrls.set(controls::FrameDuration, frameDuration); > @@ -423,10 +466,11 @@ void IPAIPU3::setControls(unsigned int frame) > IPU3Action op; > op.op = ActionSetSensorControls; > > - ControlList ctrls(ctrls_); > - ctrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); > - ctrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_)); > - op.controls = ctrls; > + ControlList sensorCtrls(ctrls_); > + sensorCtrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); > + sensorCtrls.set(V4L2_CID_ANALOGUE_GAIN, 
static_cast<int32_t>(gain_)); > + > + op.sensorControls = sensorCtrls; > > queueFrameAction.emit(frame, op); > }
Hi Han-Lin, One more comment, On 10/28/21 3:33 PM, Han-Lin Chen wrote: > Set the statistics with the latest AE AiqResults which has the same > exposure time and analog gain. The patch reduces the AE haunting during > the converging process. > > Signed-off-by: Han-Lin Chen <hanlinchen@chromium.com> > --- > aiq/aiq_input_parameters.cpp | 4 +-- > ipu3.cpp | 66 ++++++++++++++++++++++++++++++------ > 2 files changed, 57 insertions(+), 13 deletions(-) > > diff --git a/aiq/aiq_input_parameters.cpp b/aiq/aiq_input_parameters.cpp > index bc87b31..5dd2f6c 100644 > --- a/aiq/aiq_input_parameters.cpp > +++ b/aiq/aiq_input_parameters.cpp > @@ -130,7 +130,7 @@ AiqInputParameters &AiqInputParameters::operator=(const AiqInputParameters &othe > > void AiqInputParameters::setAeAwbAfDefaults() > { > - /*Ae Params */ > + /* Ae Params */ > aeInputParams.num_exposures = NUM_EXPOSURES; > aeInputParams.frame_use = ia_aiq_frame_use_preview; > aeInputParams.flash_mode = ia_aiq_flash_mode_off; > @@ -166,7 +166,7 @@ void AiqInputParameters::setAeAwbAfDefaults() > ia_aiq_af_range_normal, > ia_aiq_af_metering_mode_auto, > ia_aiq_flash_mode_off, > - NULL, NULL, false > + &focusRect, &manualFocusParams, false > }; > > /* GBCE Params */ > diff --git a/ipu3.cpp b/ipu3.cpp > index 8126e9d..b7de901 100644 > --- a/ipu3.cpp > +++ b/ipu3.cpp > @@ -59,7 +59,8 @@ private: > void fillParams(unsigned int frame, ipu3_uapi_params *params); > void parseStatistics(unsigned int frame, > int64_t frameTimestamp, > - const ipu3_uapi_stats_3a *stats); > + const ipu3_uapi_stats_3a *stats, > + const ControlList& sensorCtrls); > > void setControls(unsigned int frame); > > @@ -84,6 +85,7 @@ private: > /* Temporary storage until we have a FrameContext object / struct */ > aiq::AiqInputParameters aiqInputParams_; > aiq::AiqResults results_; > + aiq::AiqResultsRingBuffer resultsHistory_; > > BinaryData aiqb_; > BinaryData nvm_; > @@ -282,6 +284,8 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo, > /* Upate the 
camera controls using the new sensor settings. */ > updateControls(sensorInfo_, ctrls_, ipaControls); > > + resultsHistory_.Reset(); > + > return 0; > } > > @@ -327,7 +331,10 @@ void IPAIPU3::processEvent(const IPU3Event &event) > const ipu3_uapi_stats_3a *stats = > reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data()); > > - parseStatistics(event.frame, event.frameTimestamp, stats); > + parseStatistics(event.frame, > + event.frameTimestamp, > + stats, > + event.sensorControls); > break; > } > case EventFillParams: { > @@ -374,14 +381,16 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) > */ > > /* Run algorithms into/using this context structure */ > - if (frame % 10 == 0) > - aiq_.run2a(frame, aiqInputParams_, results_); > + aiq_.run2a(frame, aiqInputParams_, results_); Is this intended? I believe we'll run all algorithms for each frame now. I vaguely remember chrome used to run algorithms once every X frames (which was 4 if I wasn't mistaken). Although with 4, I was getting oscillations and so we arbitrarily bumped up to 10. I might be wrong about this though. Do we need to run every algorithm for each frame until convergence? or a fixed set before/after that? 
> > aic_.updateRuntimeParams(results_); > aic_.run(params); > > exposure_ = results_.ae()->exposures[0].sensor_exposure->coarse_integration_time; > gain_ = results_.ae()->exposures[0].sensor_exposure->analog_gain_code_global; > + > + resultsHistory_.Push(results_); > + > setControls(frame); > > IPU3Action op; > @@ -392,7 +401,8 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) > > void IPAIPU3::parseStatistics(unsigned int frame, > int64_t frameTimestamp, > - const ipu3_uapi_stats_3a *stats) > + const ipu3_uapi_stats_3a *stats, > + const ControlList& sensorCtrls) > { > ControlList ctrls(controls::controls); > > @@ -403,10 +413,43 @@ void IPAIPU3::parseStatistics(unsigned int frame, > */ > > ASSERT (frameTimestamp > 0); > - aiq_.setStatistics(frame, frameTimestamp, results_, stats); > + > + /* Ae algorithm expects the statistics to be set with it's corresponding Ae > + * result, i.e., the Ae result should match the exposure time/analog gain > + * with the the effective sensor controls of the statistics. > + * Search the required Ae result in the result history and combine it with > + * the latest result as the input to AIQ::setStatistics(). 
> + */ > + > + int32_t effectiveExpo = 0; > + int32_t effectiveGain = 0; > + ControlValue ctrlValue; > + > + ctrlValue = sensorCtrls.get(V4L2_CID_EXPOSURE); > + if (!ctrlValue.isNone()) > + effectiveExpo = ctrlValue.get<int32_t>(); > + > + ctrlValue = sensorCtrls.get(V4L2_CID_ANALOGUE_GAIN); > + if (!ctrlValue.isNone()) > + effectiveGain = ctrlValue.get<int32_t>(); > + > + auto pred = [effectiveExpo, effectiveGain] (aiq::AiqResults& result) { > + ia_aiq_exposure_sensor_parameters* sensorExposure = > + result.ae()->exposures[0].sensor_exposure; > + > + return (effectiveExpo == sensorExposure->coarse_integration_time || > + effectiveGain == sensorExposure->analog_gain_code_global); > + }; > + aiq::AiqResults& aeMatchedResults = resultsHistory_.searchBackward(pred, results_); > + > + aiq::AiqResults combinedResults = results_; > + combinedResults.setAe(aeMatchedResults.ae()); > + > + /* Aiq library expects timestamp in microseconds */ > + aiq_.setStatistics(frame, (frameTimestamp / 1000), combinedResults, stats); > > /* Set frame durations from exposure results */ > - ia_aiq_exposure_sensor_parameters *sensorExposure = results_.ae()->exposures->sensor_exposure; > + ia_aiq_exposure_sensor_parameters *sensorExposure = combinedResults.ae()->exposures->sensor_exposure; > int64_t frameDuration = (sensorExposure->line_length_pixels * sensorExposure->frame_length_lines) / > (sensorInfo_.pixelRate / 1e6); > ctrls.set(controls::FrameDuration, frameDuration); > @@ -423,10 +466,11 @@ void IPAIPU3::setControls(unsigned int frame) > IPU3Action op; > op.op = ActionSetSensorControls; > > - ControlList ctrls(ctrls_); > - ctrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); > - ctrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_)); > - op.controls = ctrls; > + ControlList sensorCtrls(ctrls_); > + sensorCtrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); > + sensorCtrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_)); > + > + op.sensorControls = 
sensorCtrls; > > queueFrameAction.emit(frame, op); > }
Hi Umang, Thanks for the review. On Fri, Oct 29, 2021 at 2:38 AM Umang Jain <umang.jain@ideasonboard.com> wrote: > > Hi Han-Lin, > > One more comment, > > On 10/28/21 3:33 PM, Han-Lin Chen wrote: > > Set the statistics with the latest AE AiqResults which has the same > > exposure time and analog gain. The patch reduces the AE haunting during > > the converging process. > > > > Signed-off-by: Han-Lin Chen <hanlinchen@chromium.com> > > --- > > aiq/aiq_input_parameters.cpp | 4 +-- > > ipu3.cpp | 66 ++++++++++++++++++++++++++++++------ > > 2 files changed, 57 insertions(+), 13 deletions(-) > > > > diff --git a/aiq/aiq_input_parameters.cpp b/aiq/aiq_input_parameters.cpp > > index bc87b31..5dd2f6c 100644 > > --- a/aiq/aiq_input_parameters.cpp > > +++ b/aiq/aiq_input_parameters.cpp > > @@ -130,7 +130,7 @@ AiqInputParameters &AiqInputParameters::operator=(const AiqInputParameters &othe > > > > void AiqInputParameters::setAeAwbAfDefaults() > > { > > - /*Ae Params */ > > + /* Ae Params */ > > aeInputParams.num_exposures = NUM_EXPOSURES; > > aeInputParams.frame_use = ia_aiq_frame_use_preview; > > aeInputParams.flash_mode = ia_aiq_flash_mode_off; > > @@ -166,7 +166,7 @@ void AiqInputParameters::setAeAwbAfDefaults() > > ia_aiq_af_range_normal, > > ia_aiq_af_metering_mode_auto, > > ia_aiq_flash_mode_off, > > - NULL, NULL, false > > + &focusRect, &manualFocusParams, false > > }; > > > > /* GBCE Params */ > > diff --git a/ipu3.cpp b/ipu3.cpp > > index 8126e9d..b7de901 100644 > > --- a/ipu3.cpp > > +++ b/ipu3.cpp > > @@ -59,7 +59,8 @@ private: > > void fillParams(unsigned int frame, ipu3_uapi_params *params); > > void parseStatistics(unsigned int frame, > > int64_t frameTimestamp, > > - const ipu3_uapi_stats_3a *stats); > > + const ipu3_uapi_stats_3a *stats, > > + const ControlList& sensorCtrls); > > > > void setControls(unsigned int frame); > > > > @@ -84,6 +85,7 @@ private: > > /* Temporary storage until we have a FrameContext object / struct */ > > aiq::AiqInputParameters 
aiqInputParams_; > > aiq::AiqResults results_; > > + aiq::AiqResultsRingBuffer resultsHistory_; > > > > BinaryData aiqb_; > > BinaryData nvm_; > > @@ -282,6 +284,8 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo, > > /* Upate the camera controls using the new sensor settings. */ > > updateControls(sensorInfo_, ctrls_, ipaControls); > > > > + resultsHistory_.Reset(); > > + > > return 0; > > } > > > > @@ -327,7 +331,10 @@ void IPAIPU3::processEvent(const IPU3Event &event) > > const ipu3_uapi_stats_3a *stats = > > reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data()); > > > > - parseStatistics(event.frame, event.frameTimestamp, stats); > > + parseStatistics(event.frame, > > + event.frameTimestamp, > > + stats, > > + event.sensorControls); > > break; > > } > > case EventFillParams: { > > @@ -374,14 +381,16 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) > > */ > > > > /* Run algorithms into/using this context structure */ > > - if (frame % 10 == 0) > > - aiq_.run2a(frame, aiqInputParams_, results_); > > + aiq_.run2a(frame, aiqInputParams_, results_); > > > Is this intended, I believe we'll run all algorithms for each frame now. > I vaguely remember chrome use to run algorthims for every X frames once > (which as 4 if I wasn't mistaken). Although with 4, I was getting > oscillations and so we arbitrarily bumped up to 10. I noticed the oscillations too ;-). In fact, the "frame_use" and "AiqResults History" patches are to fix the oscillations problem. I don't remember whether chrome handles it though. In my understanding, Intel HAL runs the algorithm for all frames. Running an algorithm for every X frames may slow down the AE/AF converging time, like X times? > > I might be wrong about this though. Do we need to run every algorithms > for each frame until convergence ? or a fixed set before/after that? 
Because AE/AF may rescan once the scenes are changed in Auto mode, and only AIQ knows when the scenes are changed and reports the lens controls, I suppose we cannot stop running it after it's converged. > > > > > aic_.updateRuntimeParams(results_); > > aic_.run(params); > > > > exposure_ = results_.ae()->exposures[0].sensor_exposure->coarse_integration_time; > > gain_ = results_.ae()->exposures[0].sensor_exposure->analog_gain_code_global; > > + > > + resultsHistory_.Push(results_); > > + > > setControls(frame); > > > > IPU3Action op; > > @@ -392,7 +401,8 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) > > > > void IPAIPU3::parseStatistics(unsigned int frame, > > int64_t frameTimestamp, > > - const ipu3_uapi_stats_3a *stats) > > + const ipu3_uapi_stats_3a *stats, > > + const ControlList& sensorCtrls) > > { > > ControlList ctrls(controls::controls); > > > > @@ -403,10 +413,43 @@ void IPAIPU3::parseStatistics(unsigned int frame, > > */ > > > > ASSERT (frameTimestamp > 0); > > - aiq_.setStatistics(frame, frameTimestamp, results_, stats); > > + > > + /* Ae algorithm expects the statistics to be set with it's corresponding Ae > > + * result, i.e., the Ae result should match the exposure time/analog gain > > + * with the the effective sensor controls of the statistics. > > + * Search the required Ae result in the result history and combine it with > > + * the latest result as the input to AIQ::setStatistics(). 
> > + */ > > + > > + int32_t effectiveExpo = 0; > > + int32_t effectiveGain = 0; > > + ControlValue ctrlValue; > > + > > + ctrlValue = sensorCtrls.get(V4L2_CID_EXPOSURE); > > + if (!ctrlValue.isNone()) > > + effectiveExpo = ctrlValue.get<int32_t>(); > > + > > + ctrlValue = sensorCtrls.get(V4L2_CID_ANALOGUE_GAIN); > > + if (!ctrlValue.isNone()) > > + effectiveGain = ctrlValue.get<int32_t>(); > > + > > + auto pred = [effectiveExpo, effectiveGain] (aiq::AiqResults& result) { > > + ia_aiq_exposure_sensor_parameters* sensorExposure = > > + result.ae()->exposures[0].sensor_exposure; > > + > > + return (effectiveExpo == sensorExposure->coarse_integration_time || > > + effectiveGain == sensorExposure->analog_gain_code_global); > > + }; > > + aiq::AiqResults& aeMatchedResults = resultsHistory_.searchBackward(pred, results_); > > + > > + aiq::AiqResults combinedResults = results_; > > + combinedResults.setAe(aeMatchedResults.ae()); > > + > > + /* Aiq library expects timestamp in microseconds */ > > + aiq_.setStatistics(frame, (frameTimestamp / 1000), combinedResults, stats); > > > > /* Set frame durations from exposure results */ > > - ia_aiq_exposure_sensor_parameters *sensorExposure = results_.ae()->exposures->sensor_exposure; > > + ia_aiq_exposure_sensor_parameters *sensorExposure = combinedResults.ae()->exposures->sensor_exposure; > > int64_t frameDuration = (sensorExposure->line_length_pixels * sensorExposure->frame_length_lines) / > > (sensorInfo_.pixelRate / 1e6); > > ctrls.set(controls::FrameDuration, frameDuration); > > @@ -423,10 +466,11 @@ void IPAIPU3::setControls(unsigned int frame) > > IPU3Action op; > > op.op = ActionSetSensorControls; > > > > - ControlList ctrls(ctrls_); > > - ctrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); > > - ctrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_)); > > - op.controls = ctrls; > > + ControlList sensorCtrls(ctrls_); > > + sensorCtrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); > > + 
sensorCtrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_)); > > + > > + op.sensorControls = sensorCtrls; > > > > queueFrameAction.emit(frame, op); > > }
On Fri, Oct 29, 2021 at 2:34 AM Umang Jain <umang.jain@ideasonboard.com> wrote: > > Hi Han-Lin, > > Thank you for the patch. > > On 10/28/21 3:33 PM, Han-Lin Chen wrote: > > Set the statistics with the latest AE AiqResults which has the same > > exposure time and analog gain. The patch reduces the AE haunting during > > the converging process. > > > > Signed-off-by: Han-Lin Chen <hanlinchen@chromium.com> > > --- > > aiq/aiq_input_parameters.cpp | 4 +-- > > ipu3.cpp | 66 ++++++++++++++++++++++++++++++------ > > 2 files changed, 57 insertions(+), 13 deletions(-) > > > > diff --git a/aiq/aiq_input_parameters.cpp b/aiq/aiq_input_parameters.cpp > > index bc87b31..5dd2f6c 100644 > > --- a/aiq/aiq_input_parameters.cpp > > +++ b/aiq/aiq_input_parameters.cpp > > @@ -130,7 +130,7 @@ AiqInputParameters &AiqInputParameters::operator=(const AiqInputParameters &othe > > > > void AiqInputParameters::setAeAwbAfDefaults() > > { > > - /*Ae Params */ > > + /* Ae Params */ > > aeInputParams.num_exposures = NUM_EXPOSURES; > > aeInputParams.frame_use = ia_aiq_frame_use_preview; > > aeInputParams.flash_mode = ia_aiq_flash_mode_off; > > @@ -166,7 +166,7 @@ void AiqInputParameters::setAeAwbAfDefaults() > > ia_aiq_af_range_normal, > > ia_aiq_af_metering_mode_auto, > > ia_aiq_flash_mode_off, > > - NULL, NULL, false > > + &focusRect, &manualFocusParams, false > > > Should this be moved to focus patch one instead? I find this odd to be > present in this patch, since it's about AE results? Yes, you're right ;-). 
> > > }; > > > > /* GBCE Params */ > > diff --git a/ipu3.cpp b/ipu3.cpp > > index 8126e9d..b7de901 100644 > > --- a/ipu3.cpp > > +++ b/ipu3.cpp > > @@ -59,7 +59,8 @@ private: > > void fillParams(unsigned int frame, ipu3_uapi_params *params); > > void parseStatistics(unsigned int frame, > > int64_t frameTimestamp, > > - const ipu3_uapi_stats_3a *stats); > > + const ipu3_uapi_stats_3a *stats, > > + const ControlList& sensorCtrls); > > > > void setControls(unsigned int frame); > > > > @@ -84,6 +85,7 @@ private: > > /* Temporary storage until we have a FrameContext object / struct */ > > aiq::AiqInputParameters aiqInputParams_; > > aiq::AiqResults results_; > > + aiq::AiqResultsRingBuffer resultsHistory_; > > > > BinaryData aiqb_; > > BinaryData nvm_; > > @@ -282,6 +284,8 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo, > > /* Upate the camera controls using the new sensor settings. */ > > updateControls(sensorInfo_, ctrls_, ipaControls); > > > > + resultsHistory_.Reset(); > > + > > return 0; > > } > > > > @@ -327,7 +331,10 @@ void IPAIPU3::processEvent(const IPU3Event &event) > > const ipu3_uapi_stats_3a *stats = > > reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data()); > > > > - parseStatistics(event.frame, event.frameTimestamp, stats); > > + parseStatistics(event.frame, > > + event.frameTimestamp, > > + stats, > > + event.sensorControls); > > > indentation > > > break; > > } > > case EventFillParams: { > > @@ -374,14 +381,16 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) > > */ > > > > /* Run algorithms into/using this context structure */ > > - if (frame % 10 == 0) > > - aiq_.run2a(frame, aiqInputParams_, results_); > > + aiq_.run2a(frame, aiqInputParams_, results_); > > > > aic_.updateRuntimeParams(results_); > > aic_.run(params); > > > > exposure_ = results_.ae()->exposures[0].sensor_exposure->coarse_integration_time; > > gain_ = results_.ae()->exposures[0].sensor_exposure->analog_gain_code_global; > > + > > + 
resultsHistory_.Push(results_); > > > Possibly usage of ringbuffer to highlight how it's used, what it > provides (a high level documentation on it's 'need', the 'when' and > 'why' of previous algo-runs results lookup) would be good to understand > the context I believe. All this can be a separate patch. > > > + > > setControls(frame); > > > > IPU3Action op; > > @@ -392,7 +401,8 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) > > > > void IPAIPU3::parseStatistics(unsigned int frame, > > int64_t frameTimestamp, > > - const ipu3_uapi_stats_3a *stats) > > + const ipu3_uapi_stats_3a *stats, > > + const ControlList& sensorCtrls) > > { > > ControlList ctrls(controls::controls); > > > > @@ -403,10 +413,43 @@ void IPAIPU3::parseStatistics(unsigned int frame, > > */ > > > > ASSERT (frameTimestamp > 0); > > - aiq_.setStatistics(frame, frameTimestamp, results_, stats); > > + > > + /* Ae algorithm expects the statistics to be set with it's corresponding Ae > > + * result, i.e., the Ae result should match the exposure time/analog gain > > + * with the the effective sensor controls of the statistics. > > + * Search the required Ae result in the result history and combine it with > > + * the latest result as the input to AIQ::setStatistics(). > > + */ > > > Is there a bit more descriptive documentation I can refer to? Maybe in > the ia_aiq documentation to understand 'when' and 'how' the aiq results > are meant to be re-used? I think I wrote my last comment a bit hastily Sadly, there are no documents about it. I collect the restriction from Intel's HAL implementation and experiments. 
> > > + > > + int32_t effectiveExpo = 0; > > + int32_t effectiveGain = 0; > > + ControlValue ctrlValue; > > + > > + ctrlValue = sensorCtrls.get(V4L2_CID_EXPOSURE); > > + if (!ctrlValue.isNone()) > > + effectiveExpo = ctrlValue.get<int32_t>(); > > + > > + ctrlValue = sensorCtrls.get(V4L2_CID_ANALOGUE_GAIN); > > + if (!ctrlValue.isNone()) > > + effectiveGain = ctrlValue.get<int32_t>(); > > + > > + auto pred = [effectiveExpo, effectiveGain] (aiq::AiqResults& result) { > > + ia_aiq_exposure_sensor_parameters* sensorExposure = > > + result.ae()->exposures[0].sensor_exposure; > > + > > + return (effectiveExpo == sensorExposure->coarse_integration_time || > > + effectiveGain == sensorExposure->analog_gain_code_global); > > + }; > > + aiq::AiqResults& aeMatchedResults = resultsHistory_.searchBackward(pred, results_); > > + aiq::AiqResults combinedResults = results_; > > + combinedResults.setAe(aeMatchedResults.ae()); > > + > > + /* Aiq library expects timestamp in microseconds */ > > + aiq_.setStatistics(frame, (frameTimestamp / 1000), combinedResults, stats); > > > > /* Set frame durations from exposure results */ > > - ia_aiq_exposure_sensor_parameters *sensorExposure = results_.ae()->exposures->sensor_exposure; > > + ia_aiq_exposure_sensor_parameters *sensorExposure = combinedResults.ae()->exposures->sensor_exposure; > > int64_t frameDuration = (sensorExposure->line_length_pixels * sensorExposure->frame_length_lines) / > > (sensorInfo_.pixelRate / 1e6); > > ctrls.set(controls::FrameDuration, frameDuration); > > @@ -423,10 +466,11 @@ void IPAIPU3::setControls(unsigned int frame) > > IPU3Action op; > > op.op = ActionSetSensorControls; > > > > - ControlList ctrls(ctrls_); > > - ctrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); > > - ctrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_)); > > - op.controls = ctrls; > > + ControlList sensorCtrls(ctrls_); > > + sensorCtrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); > > + 
sensorCtrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_)); > > + > > + op.sensorControls = sensorCtrls; > > > > queueFrameAction.emit(frame, op); > > }
diff --git a/aiq/aiq_input_parameters.cpp b/aiq/aiq_input_parameters.cpp index bc87b31..5dd2f6c 100644 --- a/aiq/aiq_input_parameters.cpp +++ b/aiq/aiq_input_parameters.cpp @@ -130,7 +130,7 @@ AiqInputParameters &AiqInputParameters::operator=(const AiqInputParameters &othe void AiqInputParameters::setAeAwbAfDefaults() { - /*Ae Params */ + /* Ae Params */ aeInputParams.num_exposures = NUM_EXPOSURES; aeInputParams.frame_use = ia_aiq_frame_use_preview; aeInputParams.flash_mode = ia_aiq_flash_mode_off; @@ -166,7 +166,7 @@ void AiqInputParameters::setAeAwbAfDefaults() ia_aiq_af_range_normal, ia_aiq_af_metering_mode_auto, ia_aiq_flash_mode_off, - NULL, NULL, false + &focusRect, &manualFocusParams, false }; /* GBCE Params */ diff --git a/ipu3.cpp b/ipu3.cpp index 8126e9d..b7de901 100644 --- a/ipu3.cpp +++ b/ipu3.cpp @@ -59,7 +59,8 @@ private: void fillParams(unsigned int frame, ipu3_uapi_params *params); void parseStatistics(unsigned int frame, int64_t frameTimestamp, - const ipu3_uapi_stats_3a *stats); + const ipu3_uapi_stats_3a *stats, + const ControlList& sensorCtrls); void setControls(unsigned int frame); @@ -84,6 +85,7 @@ private: /* Temporary storage until we have a FrameContext object / struct */ aiq::AiqInputParameters aiqInputParams_; aiq::AiqResults results_; + aiq::AiqResultsRingBuffer resultsHistory_; BinaryData aiqb_; BinaryData nvm_; @@ -282,6 +284,8 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo, /* Upate the camera controls using the new sensor settings. 
*/ updateControls(sensorInfo_, ctrls_, ipaControls); + resultsHistory_.Reset(); + return 0; } @@ -327,7 +331,10 @@ void IPAIPU3::processEvent(const IPU3Event &event) const ipu3_uapi_stats_3a *stats = reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data()); - parseStatistics(event.frame, event.frameTimestamp, stats); + parseStatistics(event.frame, + event.frameTimestamp, + stats, + event.sensorControls); break; } case EventFillParams: { @@ -374,14 +381,16 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) */ /* Run algorithms into/using this context structure */ - if (frame % 10 == 0) - aiq_.run2a(frame, aiqInputParams_, results_); + aiq_.run2a(frame, aiqInputParams_, results_); aic_.updateRuntimeParams(results_); aic_.run(params); exposure_ = results_.ae()->exposures[0].sensor_exposure->coarse_integration_time; gain_ = results_.ae()->exposures[0].sensor_exposure->analog_gain_code_global; + + resultsHistory_.Push(results_); + setControls(frame); IPU3Action op; @@ -392,7 +401,8 @@ void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params) void IPAIPU3::parseStatistics(unsigned int frame, int64_t frameTimestamp, - const ipu3_uapi_stats_3a *stats) + const ipu3_uapi_stats_3a *stats, + const ControlList& sensorCtrls) { ControlList ctrls(controls::controls); @@ -403,10 +413,43 @@ void IPAIPU3::parseStatistics(unsigned int frame, */ ASSERT (frameTimestamp > 0); - aiq_.setStatistics(frame, frameTimestamp, results_, stats); + + /* Ae algorithm expects the statistics to be set with it's corresponding Ae + * result, i.e., the Ae result should match the exposure time/analog gain + * with the the effective sensor controls of the statistics. + * Search the required Ae result in the result history and combine it with + * the latest result as the input to AIQ::setStatistics(). 
+ */ + + int32_t effectiveExpo = 0; + int32_t effectiveGain = 0; + ControlValue ctrlValue; + + ctrlValue = sensorCtrls.get(V4L2_CID_EXPOSURE); + if (!ctrlValue.isNone()) + effectiveExpo = ctrlValue.get<int32_t>(); + + ctrlValue = sensorCtrls.get(V4L2_CID_ANALOGUE_GAIN); + if (!ctrlValue.isNone()) + effectiveGain = ctrlValue.get<int32_t>(); + + auto pred = [effectiveExpo, effectiveGain] (aiq::AiqResults& result) { + ia_aiq_exposure_sensor_parameters* sensorExposure = + result.ae()->exposures[0].sensor_exposure; + + return (effectiveExpo == sensorExposure->coarse_integration_time || + effectiveGain == sensorExposure->analog_gain_code_global); + }; + aiq::AiqResults& aeMatchedResults = resultsHistory_.searchBackward(pred, results_); + + aiq::AiqResults combinedResults = results_; + combinedResults.setAe(aeMatchedResults.ae()); + + /* Aiq library expects timestamp in microseconds */ + aiq_.setStatistics(frame, (frameTimestamp / 1000), combinedResults, stats); /* Set frame durations from exposure results */ - ia_aiq_exposure_sensor_parameters *sensorExposure = results_.ae()->exposures->sensor_exposure; + ia_aiq_exposure_sensor_parameters *sensorExposure = combinedResults.ae()->exposures->sensor_exposure; int64_t frameDuration = (sensorExposure->line_length_pixels * sensorExposure->frame_length_lines) / (sensorInfo_.pixelRate / 1e6); ctrls.set(controls::FrameDuration, frameDuration); @@ -423,10 +466,11 @@ void IPAIPU3::setControls(unsigned int frame) IPU3Action op; op.op = ActionSetSensorControls; - ControlList ctrls(ctrls_); - ctrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); - ctrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_)); - op.controls = ctrls; + ControlList sensorCtrls(ctrls_); + sensorCtrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_)); + sensorCtrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_)); + + op.sensorControls = sensorCtrls; queueFrameAction.emit(frame, op); }
Set the statistics with the latest AE AiqResults which has the same exposure time and analog gain. The patch reduces the AE hunting during the converging process. Signed-off-by: Han-Lin Chen <hanlinchen@chromium.com> --- aiq/aiq_input_parameters.cpp | 4 +-- ipu3.cpp | 66 ++++++++++++++++++++++++++++++------ 2 files changed, 57 insertions(+), 13 deletions(-)