Message ID | 20230731094641.73646-6-david.plowman@raspberrypi.com |
---|---|
State | Superseded |
Headers | show |
Series |
|
Related | show |
Hi David, Thank you for your patch. On Mon, 31 Jul 2023 at 10:47, David Plowman via libcamera-devel <libcamera-devel@lists.libcamera.org> wrote: > > Whenever we run Agc::process(), we store the most recent total > exposure requested for each channel. > > With these values we can apply the channel constraints after > time-filtering the requested total exposure, but before working out > how much digital gain is needed. > > Signed-off-by: David Plowman <david.plowman@raspberrypi.com> > --- > src/ipa/rpi/controller/rpi/agc.cpp | 21 ++++-- > src/ipa/rpi/controller/rpi/agc.h | 1 + > src/ipa/rpi/controller/rpi/agc_channel.cpp | 76 ++++++++++++++++++---- > src/ipa/rpi/controller/rpi/agc_channel.h | 8 ++- > src/ipa/rpi/vc4/data/imx477.json | 3 +- > 5 files changed, 90 insertions(+), 19 deletions(-) > > diff --git a/src/ipa/rpi/controller/rpi/agc.cpp b/src/ipa/rpi/controller/rpi/agc.cpp > index 7e627bba..7077fbff 100644 > --- a/src/ipa/rpi/controller/rpi/agc.cpp > +++ b/src/ipa/rpi/controller/rpi/agc.cpp > @@ -40,6 +40,7 @@ int Agc::read(const libcamera::YamlObject ¶ms) > */ > if (!params.contains("channels")) { > LOG(RPiAgc, Debug) << "Single channel only"; > + channelResults_.resize(1, 0s); > channelData_.emplace_back(); > return channelData_.back().channel.read(params, getHardwareConfig()); > } > @@ -59,6 +60,8 @@ int Agc::read(const libcamera::YamlObject ¶ms) > return -1; > } > > + channelResults_.resize(channelData_.size(), 0s); > + > return 0; > } > > @@ -234,15 +237,21 @@ static void getChannelIndex(Metadata *metadata, const char *message, unsigned in > LOG(RPiAgc, Debug) << message; > } > > -static void setChannelIndex(Metadata *metadata, const char *message, unsigned int channelIndex) > +static libcamera::utils::Duration > +setChannelIndex(Metadata *metadata, const char *message, unsigned int channelIndex) > { > std::unique_lock<RPiController::Metadata> lock(*metadata); > AgcStatus *status = metadata->getLocked<AgcStatus>("agc.status"); > - if (status) > + 
libcamera::utils::Duration dur = 0s; > + > + if (status) { > status->channel = channelIndex; > - else > + dur = status->totalExposureValue; > + } else > /* This does happen at startup, otherwise it would be a Warning or Error. */ > LOG(RPiAgc, Debug) << message; > + > + return dur; > } > > void Agc::prepare(Metadata *imageMetadata) > @@ -306,8 +315,10 @@ void Agc::process(StatisticsPtr &stats, Metadata *imageMetadata) > /* Can also happen when new channels start. */ > LOG(RPiAgc, Debug) << "process: channel " << channelIndex << " not seen yet"; > > - channelData.channel.process(stats, &deviceStatus, imageMetadata); > - setChannelIndex(imageMetadata, "process: no AGC status found", channelIndex); > + channelData.channel.process(stats, &deviceStatus, imageMetadata, channelResults_); > + auto dur = setChannelIndex(imageMetadata, "process: no AGC status found", channelIndex); > + if (dur) > + channelResults_[channelIndex] = dur; > > /* And onto the next channel for the next call. */ > index_ = (index_ + 1) % activeChannels_.size(); > diff --git a/src/ipa/rpi/controller/rpi/agc.h b/src/ipa/rpi/controller/rpi/agc.h > index 2eed2bab..acd6dc2f 100644 > --- a/src/ipa/rpi/controller/rpi/agc.h > +++ b/src/ipa/rpi/controller/rpi/agc.h > @@ -54,6 +54,7 @@ private: > std::vector<AgcChannelData> channelData_; > std::vector<unsigned int> activeChannels_; > unsigned int index_; /* index into the activeChannels_ */ > + AgcChannelResults channelResults_; It took me a while to understand what channelResults_ was. Perhaps we can rename this to channelTotalExposure_ to be more explicit? 
> }; > > } /* namespace RPiController */ > diff --git a/src/ipa/rpi/controller/rpi/agc_channel.cpp b/src/ipa/rpi/controller/rpi/agc_channel.cpp > index ed8a3b30..4e60802c 100644 > --- a/src/ipa/rpi/controller/rpi/agc_channel.cpp > +++ b/src/ipa/rpi/controller/rpi/agc_channel.cpp > @@ -175,6 +175,11 @@ readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes, > > int AgcChannelConstraint::read(const libcamera::YamlObject ¶ms) > { > + auto channelValue = params["channel"].get<unsigned int>(); > + if (!channelValue) > + return -EINVAL; > + channel = *channelValue; > + > std::string boundString = params["bound"].get<std::string>(""); > transform(boundString.begin(), boundString.end(), > boundString.begin(), ::toupper); > @@ -184,15 +189,15 @@ int AgcChannelConstraint::read(const libcamera::YamlObject ¶ms) > } > bound = boundString == "UPPER" ? Bound::UPPER : Bound::LOWER; > > - auto value = params["factor"].get<double>(); > - if (!value) > + auto factorValue = params["factor"].get<double>(); > + if (!factorValue) > return -EINVAL; > - factor = *value; > + factor = *factorValue; > > return 0; > } > > -int readChannelConstraints(std::vector<AgcChannelConstraint> channelConstraints, > +int readChannelConstraints(std::vector<AgcChannelConstraint> &channelConstraints, > const libcamera::YamlObject ¶ms) > { > int ret = 0; > @@ -231,6 +236,7 @@ int AgcConfig::read(const libcamera::YamlObject ¶ms) > ret = readChannelConstraints(channelConstraints, params["channel_constraints"]); > if (ret) > return ret; > + LOG(RPiAgc, Info) << "Read " << channelConstraints.size() << " channel constraints"; > } > > ret = yTarget.read(params["y_target"]); > @@ -489,7 +495,8 @@ void AgcChannel::prepare(Metadata *imageMetadata) > } > } > > -void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata) > +void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, > + Metadata *imageMetadata, const 
AgcChannelResults &channelResults) It feels like channelResults/totalExposure could/should be passed in the imageMetadata, but I'm not too bothered with an extra parameter. > { > frameCount_++; > /* > @@ -508,12 +515,17 @@ void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, > computeTargetExposure(gain); > /* The results have to be filtered so as not to change too rapidly. */ > filterExposure(); > + /* > + * We may be asked to limit the exposure using other channels. If another channel > + * determines our upper bound we may want to know this later. > + */ > + bool channelBound = applyChannelConstraints(channelResults); > /* > * Some of the exposure has to be applied as digital gain, so work out > - * what that is. This function also tells us whether it's decided to > - * "desaturate" the image more quickly. > + * what that is. It also tells us whether it's trying to desaturate the image > + * more quickly, which can only happen when another channel is not limiting us. > */ > - bool desaturate = applyDigitalGain(gain, targetY); > + bool desaturate = applyDigitalGain(gain, targetY, channelBound); > /* > * The last thing is to divide up the exposure value into a shutter time > * and analogue gain, according to the current exposure mode. > @@ -792,7 +804,49 @@ void AgcChannel::computeTargetExposure(double gain) > LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure; > } > > -bool AgcChannel::applyDigitalGain(double gain, double targetY) > +bool AgcChannel::applyChannelConstraints(const AgcChannelResults &channelResults) > +{ > + bool channelBound = false; > + LOG(RPiAgc, Debug) > + << "Total exposure before channel constraints " << filtered_.totalExposure; > + > + for (const auto &constraint : config_.channelConstraints) { > + LOG(RPiAgc, Debug) > + << "Check constraint: channel " << constraint.channel << " bound " > + << (constraint.bound == AgcChannelConstraint::Bound::UPPER ? 
"UPPER" : "LOWER") > + << " factor " << constraint.factor; > + if (constraint.channel >= channelResults.size() || > + !channelResults[constraint.channel]) { > + LOG(RPiAgc, Debug) << "no such channel - skipped"; > + continue; > + } > + > + if (channelResults[constraint.channel] == 0s) { > + LOG(RPiAgc, Debug) << "zero exposure - skipped"; > + continue; > + } > + > + libcamera::utils::Duration limitExposure = > + channelResults[constraint.channel] * constraint.factor; > + LOG(RPiAgc, Debug) << "Limit exposure " << limitExposure; > + if ((constraint.bound == AgcChannelConstraint::Bound::UPPER && > + filtered_.totalExposure > limitExposure) || > + (constraint.bound == AgcChannelConstraint::Bound::LOWER && > + filtered_.totalExposure < limitExposure)) { > + filtered_.totalExposure = limitExposure; > + LOG(RPiAgc, Debug) << "Applies"; > + channelBound = true; > + } else > + LOG(RPiAgc, Debug) << "Does not apply"; > + } > + > + LOG(RPiAgc, Debug) > + << "Total exposure after channel constraints " << filtered_.totalExposure; > + > + return channelBound; > +} > + > +bool AgcChannel::applyDigitalGain(double gain, double targetY, bool channelBound) > { > double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 }); > ASSERT(minColourGain != 0.0); > @@ -812,8 +866,8 @@ bool AgcChannel::applyDigitalGain(double gain, double targetY) > * quickly (and we then approach the correct value more quickly from > * below). > */ > - bool desaturate = targetY > config_.fastReduceThreshold && > - gain < sqrt(targetY); > + bool desaturate = !channelBound && > + targetY > config_.fastReduceThreshold && gain < sqrt(targetY); > if (desaturate) > dg /= config_.fastReduceThreshold; > LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? 
" << desaturate; > diff --git a/src/ipa/rpi/controller/rpi/agc_channel.h b/src/ipa/rpi/controller/rpi/agc_channel.h > index 446125ef..7ce34360 100644 > --- a/src/ipa/rpi/controller/rpi/agc_channel.h > +++ b/src/ipa/rpi/controller/rpi/agc_channel.h > @@ -19,6 +19,8 @@ > > namespace RPiController { > > +using AgcChannelResults = std::vector<libcamera::utils::Duration>; > + > struct AgcMeteringMode { > std::vector<double> weights; > int read(const libcamera::YamlObject ¶ms); > @@ -93,7 +95,8 @@ public: > void disableAuto(); > void switchMode(CameraMode const &cameraMode, Metadata *metadata); > void prepare(Metadata *imageMetadata); > - void process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata); > + void process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata, > + const AgcChannelResults &channelResults); > > private: > bool updateLockStatus(DeviceStatus const &deviceStatus); > @@ -105,7 +108,8 @@ private: > double &gain, double &targetY); > void computeTargetExposure(double gain); > void filterExposure(); > - bool applyDigitalGain(double gain, double targetY); > + bool applyChannelConstraints(const AgcChannelResults &channelResults); > + bool applyDigitalGain(double gain, double targetY, bool channelBound); > void divideUpExposure(); > void writeAndFinish(Metadata *imageMetadata, bool desaturate); > libcamera::utils::Duration limitShutter(libcamera::utils::Duration shutter); > diff --git a/src/ipa/rpi/vc4/data/imx477.json b/src/ipa/rpi/vc4/data/imx477.json > index daffc268..bbe6da0d 100644 > --- a/src/ipa/rpi/vc4/data/imx477.json > +++ b/src/ipa/rpi/vc4/data/imx477.json > @@ -515,4 +515,5 @@ > "rpi.sharpen": { } > } > ] > -} > \ No newline at end of file > +} > + Added by accident? Other than that: Reviewed-by: Naushir Patuck <naush@raspberrypi.com> > -- > 2.30.2 >
Hi Naush Thanks for the comments. On Tue, 22 Aug 2023 at 14:13, Naushir Patuck <naush@raspberrypi.com> wrote: > > Hi David, > > Thank you for your patch. > > On Mon, 31 Jul 2023 at 10:47, David Plowman via libcamera-devel > <libcamera-devel@lists.libcamera.org> wrote: > > > > Whenever we run Agc::process(), we store the most recent total > > exposure requested for each channel. > > > > With these values we can apply the channel constraints after > > time-filtering the requested total exposure, but before working out > > how much digital gain is needed. > > > > Signed-off-by: David Plowman <david.plowman@raspberrypi.com> > > --- > > src/ipa/rpi/controller/rpi/agc.cpp | 21 ++++-- > > src/ipa/rpi/controller/rpi/agc.h | 1 + > > src/ipa/rpi/controller/rpi/agc_channel.cpp | 76 ++++++++++++++++++---- > > src/ipa/rpi/controller/rpi/agc_channel.h | 8 ++- > > src/ipa/rpi/vc4/data/imx477.json | 3 +- > > 5 files changed, 90 insertions(+), 19 deletions(-) > > > > diff --git a/src/ipa/rpi/controller/rpi/agc.cpp b/src/ipa/rpi/controller/rpi/agc.cpp > > index 7e627bba..7077fbff 100644 > > --- a/src/ipa/rpi/controller/rpi/agc.cpp > > +++ b/src/ipa/rpi/controller/rpi/agc.cpp > > @@ -40,6 +40,7 @@ int Agc::read(const libcamera::YamlObject ¶ms) > > */ > > if (!params.contains("channels")) { > > LOG(RPiAgc, Debug) << "Single channel only"; > > + channelResults_.resize(1, 0s); > > channelData_.emplace_back(); > > return channelData_.back().channel.read(params, getHardwareConfig()); > > } > > @@ -59,6 +60,8 @@ int Agc::read(const libcamera::YamlObject ¶ms) > > return -1; > > } > > > > + channelResults_.resize(channelData_.size(), 0s); > > + > > return 0; > > } > > > > @@ -234,15 +237,21 @@ static void getChannelIndex(Metadata *metadata, const char *message, unsigned in > > LOG(RPiAgc, Debug) << message; > > } > > > > -static void setChannelIndex(Metadata *metadata, const char *message, unsigned int channelIndex) > > +static libcamera::utils::Duration > > +setChannelIndex(Metadata 
*metadata, const char *message, unsigned int channelIndex) > > { > > std::unique_lock<RPiController::Metadata> lock(*metadata); > > AgcStatus *status = metadata->getLocked<AgcStatus>("agc.status"); > > - if (status) > > + libcamera::utils::Duration dur = 0s; > > + > > + if (status) { > > status->channel = channelIndex; > > - else > > + dur = status->totalExposureValue; > > + } else > > /* This does happen at startup, otherwise it would be a Warning or Error. */ > > LOG(RPiAgc, Debug) << message; > > + > > + return dur; > > } > > > > void Agc::prepare(Metadata *imageMetadata) > > @@ -306,8 +315,10 @@ void Agc::process(StatisticsPtr &stats, Metadata *imageMetadata) > > /* Can also happen when new channels start. */ > > LOG(RPiAgc, Debug) << "process: channel " << channelIndex << " not seen yet"; > > > > - channelData.channel.process(stats, &deviceStatus, imageMetadata); > > - setChannelIndex(imageMetadata, "process: no AGC status found", channelIndex); > > + channelData.channel.process(stats, &deviceStatus, imageMetadata, channelResults_); > > + auto dur = setChannelIndex(imageMetadata, "process: no AGC status found", channelIndex); > > + if (dur) > > + channelResults_[channelIndex] = dur; > > > > /* And onto the next channel for the next call. */ > > index_ = (index_ + 1) % activeChannels_.size(); > > diff --git a/src/ipa/rpi/controller/rpi/agc.h b/src/ipa/rpi/controller/rpi/agc.h > > index 2eed2bab..acd6dc2f 100644 > > --- a/src/ipa/rpi/controller/rpi/agc.h > > +++ b/src/ipa/rpi/controller/rpi/agc.h > > @@ -54,6 +54,7 @@ private: > > std::vector<AgcChannelData> channelData_; > > std::vector<unsigned int> activeChannels_; > > unsigned int index_; /* index into the activeChannels_ */ > > + AgcChannelResults channelResults_; > > It took me a while to understand what channelResults_ was. > Perhaps we can rename this to channelTotalExposure_ to be more explicit? 
Yes, I guess that I initially thought there might be more stuff here, but given that there isn't, renaming would make it clearer! > > > }; > > > > } /* namespace RPiController */ > > diff --git a/src/ipa/rpi/controller/rpi/agc_channel.cpp b/src/ipa/rpi/controller/rpi/agc_channel.cpp > > index ed8a3b30..4e60802c 100644 > > --- a/src/ipa/rpi/controller/rpi/agc_channel.cpp > > +++ b/src/ipa/rpi/controller/rpi/agc_channel.cpp > > @@ -175,6 +175,11 @@ readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes, > > > > int AgcChannelConstraint::read(const libcamera::YamlObject ¶ms) > > { > > + auto channelValue = params["channel"].get<unsigned int>(); > > + if (!channelValue) > > + return -EINVAL; > > + channel = *channelValue; > > + > > std::string boundString = params["bound"].get<std::string>(""); > > transform(boundString.begin(), boundString.end(), > > boundString.begin(), ::toupper); > > @@ -184,15 +189,15 @@ int AgcChannelConstraint::read(const libcamera::YamlObject ¶ms) > > } > > bound = boundString == "UPPER" ? 
Bound::UPPER : Bound::LOWER; > > > > - auto value = params["factor"].get<double>(); > > - if (!value) > > + auto factorValue = params["factor"].get<double>(); > > + if (!factorValue) > > return -EINVAL; > > - factor = *value; > > + factor = *factorValue; > > > > return 0; > > } > > > > -int readChannelConstraints(std::vector<AgcChannelConstraint> channelConstraints, > > +int readChannelConstraints(std::vector<AgcChannelConstraint> &channelConstraints, > > const libcamera::YamlObject ¶ms) > > { > > int ret = 0; > > @@ -231,6 +236,7 @@ int AgcConfig::read(const libcamera::YamlObject ¶ms) > > ret = readChannelConstraints(channelConstraints, params["channel_constraints"]); > > if (ret) > > return ret; > > + LOG(RPiAgc, Info) << "Read " << channelConstraints.size() << " channel constraints"; > > } > > > > ret = yTarget.read(params["y_target"]); > > @@ -489,7 +495,8 @@ void AgcChannel::prepare(Metadata *imageMetadata) > > } > > } > > > > -void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata) > > +void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, > > + Metadata *imageMetadata, const AgcChannelResults &channelResults) > > It feels like channelResults/totalExposure could/should be passed in > the imageMetdata, but I'm not too bothered with an extra parameter. I'm somewhat inclined to the notion that metadata is for storing stuff about _this_ image, not really just a means of passing extra stuff around. Though the line does get rather blurred, I admit! So I'm slightly veering towards leaving this one... > > > { > > frameCount_++; > > /* > > @@ -508,12 +515,17 @@ void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, > > computeTargetExposure(gain); > > /* The results have to be filtered so as not to change too rapidly. */ > > filterExposure(); > > + /* > > + * We may be asked to limit the exposure using other channels. 
If another channel > > + * determines our upper bound we may want to know this later. > > + */ > > + bool channelBound = applyChannelConstraints(channelResults); > > /* > > * Some of the exposure has to be applied as digital gain, so work out > > - * what that is. This function also tells us whether it's decided to > > - * "desaturate" the image more quickly. > > + * what that is. It also tells us whether it's trying to desaturate the image > > + * more quickly, which can only happen when another channel is not limiting us. > > */ > > - bool desaturate = applyDigitalGain(gain, targetY); > > + bool desaturate = applyDigitalGain(gain, targetY, channelBound); > > /* > > * The last thing is to divide up the exposure value into a shutter time > > * and analogue gain, according to the current exposure mode. > > @@ -792,7 +804,49 @@ void AgcChannel::computeTargetExposure(double gain) > > LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure; > > } > > > > -bool AgcChannel::applyDigitalGain(double gain, double targetY) > > +bool AgcChannel::applyChannelConstraints(const AgcChannelResults &channelResults) > > +{ > > + bool channelBound = false; > > + LOG(RPiAgc, Debug) > > + << "Total exposure before channel constraints " << filtered_.totalExposure; > > + > > + for (const auto &constraint : config_.channelConstraints) { > > + LOG(RPiAgc, Debug) > > + << "Check constraint: channel " << constraint.channel << " bound " > > + << (constraint.bound == AgcChannelConstraint::Bound::UPPER ? 
"UPPER" : "LOWER") > > + << " factor " << constraint.factor; > > + if (constraint.channel >= channelResults.size() || > > + !channelResults[constraint.channel]) { > > + LOG(RPiAgc, Debug) << "no such channel - skipped"; > > + continue; > > + } > > + > > + if (channelResults[constraint.channel] == 0s) { > > + LOG(RPiAgc, Debug) << "zero exposure - skipped"; > > + continue; > > + } > > + > > + libcamera::utils::Duration limitExposure = > > + channelResults[constraint.channel] * constraint.factor; > > + LOG(RPiAgc, Debug) << "Limit exposure " << limitExposure; > > + if ((constraint.bound == AgcChannelConstraint::Bound::UPPER && > > + filtered_.totalExposure > limitExposure) || > > + (constraint.bound == AgcChannelConstraint::Bound::LOWER && > > + filtered_.totalExposure < limitExposure)) { > > + filtered_.totalExposure = limitExposure; > > + LOG(RPiAgc, Debug) << "Applies"; > > + channelBound = true; > > + } else > > + LOG(RPiAgc, Debug) << "Does not apply"; > > + } > > + > > + LOG(RPiAgc, Debug) > > + << "Total exposure after channel constraints " << filtered_.totalExposure; > > + > > + return channelBound; > > +} > > + > > +bool AgcChannel::applyDigitalGain(double gain, double targetY, bool channelBound) > > { > > double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 }); > > ASSERT(minColourGain != 0.0); > > @@ -812,8 +866,8 @@ bool AgcChannel::applyDigitalGain(double gain, double targetY) > > * quickly (and we then approach the correct value more quickly from > > * below). > > */ > > - bool desaturate = targetY > config_.fastReduceThreshold && > > - gain < sqrt(targetY); > > + bool desaturate = !channelBound && > > + targetY > config_.fastReduceThreshold && gain < sqrt(targetY); > > if (desaturate) > > dg /= config_.fastReduceThreshold; > > LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? 
" << desaturate; > > diff --git a/src/ipa/rpi/controller/rpi/agc_channel.h b/src/ipa/rpi/controller/rpi/agc_channel.h > > index 446125ef..7ce34360 100644 > > --- a/src/ipa/rpi/controller/rpi/agc_channel.h > > +++ b/src/ipa/rpi/controller/rpi/agc_channel.h > > @@ -19,6 +19,8 @@ > > > > namespace RPiController { > > > > +using AgcChannelResults = std::vector<libcamera::utils::Duration>; > > + > > struct AgcMeteringMode { > > std::vector<double> weights; > > int read(const libcamera::YamlObject ¶ms); > > @@ -93,7 +95,8 @@ public: > > void disableAuto(); > > void switchMode(CameraMode const &cameraMode, Metadata *metadata); > > void prepare(Metadata *imageMetadata); > > - void process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata); > > + void process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata, > > + const AgcChannelResults &channelResults); > > > > private: > > bool updateLockStatus(DeviceStatus const &deviceStatus); > > @@ -105,7 +108,8 @@ private: > > double &gain, double &targetY); > > void computeTargetExposure(double gain); > > void filterExposure(); > > - bool applyDigitalGain(double gain, double targetY); > > + bool applyChannelConstraints(const AgcChannelResults &channelResults); > > + bool applyDigitalGain(double gain, double targetY, bool channelBound); > > void divideUpExposure(); > > void writeAndFinish(Metadata *imageMetadata, bool desaturate); > > libcamera::utils::Duration limitShutter(libcamera::utils::Duration shutter); > > diff --git a/src/ipa/rpi/vc4/data/imx477.json b/src/ipa/rpi/vc4/data/imx477.json > > index daffc268..bbe6da0d 100644 > > --- a/src/ipa/rpi/vc4/data/imx477.json > > +++ b/src/ipa/rpi/vc4/data/imx477.json > > @@ -515,4 +515,5 @@ > > "rpi.sharpen": { } > > } > > ] > > -} > > \ No newline at end of file > > +} > > + > > Added by accident? I blame my editor! I will fight with it until it relents... 
Thanks David > > Other than that: > Reviewed-by: Naushir Patuck <naush@raspberrypi.com> > > > -- > > 2.30.2 > >
Hi David, On Wed, 23 Aug 2023 at 10:42, David Plowman <david.plowman@raspberrypi.com> wrote: > > Hi Naush > > Thanks for the comments. > > On Tue, 22 Aug 2023 at 14:13, Naushir Patuck <naush@raspberrypi.com> wrote: > > > > Hi David, > > > > Thank you for your patch. > > > > On Mon, 31 Jul 2023 at 10:47, David Plowman via libcamera-devel > > <libcamera-devel@lists.libcamera.org> wrote: > > > > > > Whenever we run Agc::process(), we store the most recent total > > > exposure requested for each channel. > > > > > > With these values we can apply the channel constraints after > > > time-filtering the requested total exposure, but before working out > > > how much digital gain is needed. > > > > > > Signed-off-by: David Plowman <david.plowman@raspberrypi.com> > > > --- > > > src/ipa/rpi/controller/rpi/agc.cpp | 21 ++++-- > > > src/ipa/rpi/controller/rpi/agc.h | 1 + > > > src/ipa/rpi/controller/rpi/agc_channel.cpp | 76 ++++++++++++++++++---- > > > src/ipa/rpi/controller/rpi/agc_channel.h | 8 ++- > > > src/ipa/rpi/vc4/data/imx477.json | 3 +- > > > 5 files changed, 90 insertions(+), 19 deletions(-) > > > > > > diff --git a/src/ipa/rpi/controller/rpi/agc.cpp b/src/ipa/rpi/controller/rpi/agc.cpp > > > index 7e627bba..7077fbff 100644 > > > --- a/src/ipa/rpi/controller/rpi/agc.cpp > > > +++ b/src/ipa/rpi/controller/rpi/agc.cpp > > > @@ -40,6 +40,7 @@ int Agc::read(const libcamera::YamlObject ¶ms) > > > */ > > > if (!params.contains("channels")) { > > > LOG(RPiAgc, Debug) << "Single channel only"; > > > + channelResults_.resize(1, 0s); > > > channelData_.emplace_back(); > > > return channelData_.back().channel.read(params, getHardwareConfig()); > > > } > > > @@ -59,6 +60,8 @@ int Agc::read(const libcamera::YamlObject ¶ms) > > > return -1; > > > } > > > > > > + channelResults_.resize(channelData_.size(), 0s); > > > + > > > return 0; > > > } > > > > > > @@ -234,15 +237,21 @@ static void getChannelIndex(Metadata *metadata, const char *message, unsigned in > > > LOG(RPiAgc, Debug) 
<< message; > > > } > > > > > > -static void setChannelIndex(Metadata *metadata, const char *message, unsigned int channelIndex) > > > +static libcamera::utils::Duration > > > +setChannelIndex(Metadata *metadata, const char *message, unsigned int channelIndex) > > > { > > > std::unique_lock<RPiController::Metadata> lock(*metadata); > > > AgcStatus *status = metadata->getLocked<AgcStatus>("agc.status"); > > > - if (status) > > > + libcamera::utils::Duration dur = 0s; > > > + > > > + if (status) { > > > status->channel = channelIndex; > > > - else > > > + dur = status->totalExposureValue; > > > + } else > > > /* This does happen at startup, otherwise it would be a Warning or Error. */ > > > LOG(RPiAgc, Debug) << message; > > > + > > > + return dur; > > > } > > > > > > void Agc::prepare(Metadata *imageMetadata) > > > @@ -306,8 +315,10 @@ void Agc::process(StatisticsPtr &stats, Metadata *imageMetadata) > > > /* Can also happen when new channels start. */ > > > LOG(RPiAgc, Debug) << "process: channel " << channelIndex << " not seen yet"; > > > > > > - channelData.channel.process(stats, &deviceStatus, imageMetadata); > > > - setChannelIndex(imageMetadata, "process: no AGC status found", channelIndex); > > > + channelData.channel.process(stats, &deviceStatus, imageMetadata, channelResults_); > > > + auto dur = setChannelIndex(imageMetadata, "process: no AGC status found", channelIndex); > > > + if (dur) > > > + channelResults_[channelIndex] = dur; > > > > > > /* And onto the next channel for the next call. 
*/ > > > index_ = (index_ + 1) % activeChannels_.size(); > > > diff --git a/src/ipa/rpi/controller/rpi/agc.h b/src/ipa/rpi/controller/rpi/agc.h > > > index 2eed2bab..acd6dc2f 100644 > > > --- a/src/ipa/rpi/controller/rpi/agc.h > > > +++ b/src/ipa/rpi/controller/rpi/agc.h > > > @@ -54,6 +54,7 @@ private: > > > std::vector<AgcChannelData> channelData_; > > > std::vector<unsigned int> activeChannels_; > > > unsigned int index_; /* index into the activeChannels_ */ > > > + AgcChannelResults channelResults_; > > > > It took me a while to understand what channelResults_ was. > > Perhaps we can rename this to channelTotalExposure_ to be more explicit? > > Yes, I guess that I initially thought there might be more stuff here, > but given that there isn't, renaming would make it clearer! > > > > > > }; > > > > > > } /* namespace RPiController */ > > > diff --git a/src/ipa/rpi/controller/rpi/agc_channel.cpp b/src/ipa/rpi/controller/rpi/agc_channel.cpp > > > index ed8a3b30..4e60802c 100644 > > > --- a/src/ipa/rpi/controller/rpi/agc_channel.cpp > > > +++ b/src/ipa/rpi/controller/rpi/agc_channel.cpp > > > @@ -175,6 +175,11 @@ readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes, > > > > > > int AgcChannelConstraint::read(const libcamera::YamlObject ¶ms) > > > { > > > + auto channelValue = params["channel"].get<unsigned int>(); > > > + if (!channelValue) > > > + return -EINVAL; > > > + channel = *channelValue; > > > + > > > std::string boundString = params["bound"].get<std::string>(""); > > > transform(boundString.begin(), boundString.end(), > > > boundString.begin(), ::toupper); > > > @@ -184,15 +189,15 @@ int AgcChannelConstraint::read(const libcamera::YamlObject ¶ms) > > > } > > > bound = boundString == "UPPER" ? 
Bound::UPPER : Bound::LOWER; > > > > > > - auto value = params["factor"].get<double>(); > > > - if (!value) > > > + auto factorValue = params["factor"].get<double>(); > > > + if (!factorValue) > > > return -EINVAL; > > > - factor = *value; > > > + factor = *factorValue; > > > > > > return 0; > > > } > > > > > > -int readChannelConstraints(std::vector<AgcChannelConstraint> channelConstraints, > > > +int readChannelConstraints(std::vector<AgcChannelConstraint> &channelConstraints, > > > const libcamera::YamlObject ¶ms) > > > { > > > int ret = 0; > > > @@ -231,6 +236,7 @@ int AgcConfig::read(const libcamera::YamlObject ¶ms) > > > ret = readChannelConstraints(channelConstraints, params["channel_constraints"]); > > > if (ret) > > > return ret; > > > + LOG(RPiAgc, Info) << "Read " << channelConstraints.size() << " channel constraints"; > > > } > > > > > > ret = yTarget.read(params["y_target"]); > > > @@ -489,7 +495,8 @@ void AgcChannel::prepare(Metadata *imageMetadata) > > > } > > > } > > > > > > -void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata) > > > +void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, > > > + Metadata *imageMetadata, const AgcChannelResults &channelResults) > > > > It feels like channelResults/totalExposure could/should be passed in > > the imageMetdata, but I'm not too bothered with an extra parameter. > > I'm somewhat inclined to the notion that metadata is for storing stuff > about _this_ image, not really just a means of passing extra stuff > around. Though the line does get rather blurred, I admit! So I'm > slightly veering towards leaving this one... Sure, let's leave it as-is. > > > > > > { > > > frameCount_++; > > > /* > > > @@ -508,12 +515,17 @@ void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, > > > computeTargetExposure(gain); > > > /* The results have to be filtered so as not to change too rapidly. 
*/ > > > filterExposure(); > > > + /* > > > + * We may be asked to limit the exposure using other channels. If another channel > > > + * determines our upper bound we may want to know this later. > > > + */ > > > + bool channelBound = applyChannelConstraints(channelResults); > > > /* > > > * Some of the exposure has to be applied as digital gain, so work out > > > - * what that is. This function also tells us whether it's decided to > > > - * "desaturate" the image more quickly. > > > + * what that is. It also tells us whether it's trying to desaturate the image > > > + * more quickly, which can only happen when another channel is not limiting us. > > > */ > > > - bool desaturate = applyDigitalGain(gain, targetY); > > > + bool desaturate = applyDigitalGain(gain, targetY, channelBound); > > > /* > > > * The last thing is to divide up the exposure value into a shutter time > > > * and analogue gain, according to the current exposure mode. > > > @@ -792,7 +804,49 @@ void AgcChannel::computeTargetExposure(double gain) > > > LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure; > > > } > > > > > > -bool AgcChannel::applyDigitalGain(double gain, double targetY) > > > +bool AgcChannel::applyChannelConstraints(const AgcChannelResults &channelResults) > > > +{ > > > + bool channelBound = false; > > > + LOG(RPiAgc, Debug) > > > + << "Total exposure before channel constraints " << filtered_.totalExposure; > > > + > > > + for (const auto &constraint : config_.channelConstraints) { > > > + LOG(RPiAgc, Debug) > > > + << "Check constraint: channel " << constraint.channel << " bound " > > > + << (constraint.bound == AgcChannelConstraint::Bound::UPPER ? 
"UPPER" : "LOWER") > > > + << " factor " << constraint.factor; > > > + if (constraint.channel >= channelResults.size() || > > > + !channelResults[constraint.channel]) { > > > + LOG(RPiAgc, Debug) << "no such channel - skipped"; > > > + continue; > > > + } > > > + > > > + if (channelResults[constraint.channel] == 0s) { > > > + LOG(RPiAgc, Debug) << "zero exposure - skipped"; > > > + continue; > > > + } > > > + > > > + libcamera::utils::Duration limitExposure = > > > + channelResults[constraint.channel] * constraint.factor; > > > + LOG(RPiAgc, Debug) << "Limit exposure " << limitExposure; > > > + if ((constraint.bound == AgcChannelConstraint::Bound::UPPER && > > > + filtered_.totalExposure > limitExposure) || > > > + (constraint.bound == AgcChannelConstraint::Bound::LOWER && > > > + filtered_.totalExposure < limitExposure)) { > > > + filtered_.totalExposure = limitExposure; > > > + LOG(RPiAgc, Debug) << "Applies"; > > > + channelBound = true; > > > + } else > > > + LOG(RPiAgc, Debug) << "Does not apply"; > > > + } > > > + > > > + LOG(RPiAgc, Debug) > > > + << "Total exposure after channel constraints " << filtered_.totalExposure; > > > + > > > + return channelBound; > > > +} > > > + > > > +bool AgcChannel::applyDigitalGain(double gain, double targetY, bool channelBound) > > > { > > > double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 }); > > > ASSERT(minColourGain != 0.0); > > > @@ -812,8 +866,8 @@ bool AgcChannel::applyDigitalGain(double gain, double targetY) > > > * quickly (and we then approach the correct value more quickly from > > > * below). > > > */ > > > - bool desaturate = targetY > config_.fastReduceThreshold && > > > - gain < sqrt(targetY); > > > + bool desaturate = !channelBound && > > > + targetY > config_.fastReduceThreshold && gain < sqrt(targetY); > > > if (desaturate) > > > dg /= config_.fastReduceThreshold; > > > LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? 
" << desaturate; > > > diff --git a/src/ipa/rpi/controller/rpi/agc_channel.h b/src/ipa/rpi/controller/rpi/agc_channel.h > > > index 446125ef..7ce34360 100644 > > > --- a/src/ipa/rpi/controller/rpi/agc_channel.h > > > +++ b/src/ipa/rpi/controller/rpi/agc_channel.h > > > @@ -19,6 +19,8 @@ > > > > > > namespace RPiController { > > > > > > +using AgcChannelResults = std::vector<libcamera::utils::Duration>; > > > + > > > struct AgcMeteringMode { > > > std::vector<double> weights; > > > int read(const libcamera::YamlObject ¶ms); > > > @@ -93,7 +95,8 @@ public: > > > void disableAuto(); > > > void switchMode(CameraMode const &cameraMode, Metadata *metadata); > > > void prepare(Metadata *imageMetadata); > > > - void process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata); > > > + void process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata, > > > + const AgcChannelResults &channelResults); > > > > > > private: > > > bool updateLockStatus(DeviceStatus const &deviceStatus); > > > @@ -105,7 +108,8 @@ private: > > > double &gain, double &targetY); > > > void computeTargetExposure(double gain); > > > void filterExposure(); > > > - bool applyDigitalGain(double gain, double targetY); > > > + bool applyChannelConstraints(const AgcChannelResults &channelResults); > > > + bool applyDigitalGain(double gain, double targetY, bool channelBound); > > > void divideUpExposure(); > > > void writeAndFinish(Metadata *imageMetadata, bool desaturate); > > > libcamera::utils::Duration limitShutter(libcamera::utils::Duration shutter); > > > diff --git a/src/ipa/rpi/vc4/data/imx477.json b/src/ipa/rpi/vc4/data/imx477.json > > > index daffc268..bbe6da0d 100644 > > > --- a/src/ipa/rpi/vc4/data/imx477.json > > > +++ b/src/ipa/rpi/vc4/data/imx477.json > > > @@ -515,4 +515,5 @@ > > > "rpi.sharpen": { } > > > } > > > ] > > > -} > > > \ No newline at end of file > > > +} > > > + > > > > Added by accident? > > I blame my editor! 
I will fight with it until it relents... > > Thanks > David > > > > > Other than that: > > Reviewed-by: Naushir Patuck <naush@raspberrypi.com> > > > > > -- > > > 2.30.2 > > >
diff --git a/src/ipa/rpi/controller/rpi/agc.cpp b/src/ipa/rpi/controller/rpi/agc.cpp index 7e627bba..7077fbff 100644 --- a/src/ipa/rpi/controller/rpi/agc.cpp +++ b/src/ipa/rpi/controller/rpi/agc.cpp @@ -40,6 +40,7 @@ int Agc::read(const libcamera::YamlObject &params) */ if (!params.contains("channels")) { LOG(RPiAgc, Debug) << "Single channel only"; + channelResults_.resize(1, 0s); channelData_.emplace_back(); return channelData_.back().channel.read(params, getHardwareConfig()); } @@ -59,6 +60,8 @@ int Agc::read(const libcamera::YamlObject &params) return -1; } + channelResults_.resize(channelData_.size(), 0s); + return 0; } @@ -234,15 +237,21 @@ static void getChannelIndex(Metadata *metadata, const char *message, unsigned in LOG(RPiAgc, Debug) << message; } -static void setChannelIndex(Metadata *metadata, const char *message, unsigned int channelIndex) +static libcamera::utils::Duration +setChannelIndex(Metadata *metadata, const char *message, unsigned int channelIndex) { std::unique_lock<RPiController::Metadata> lock(*metadata); AgcStatus *status = metadata->getLocked<AgcStatus>("agc.status"); - if (status) + libcamera::utils::Duration dur = 0s; + + if (status) { status->channel = channelIndex; - else + dur = status->totalExposureValue; + } else /* This does happen at startup, otherwise it would be a Warning or Error. */ LOG(RPiAgc, Debug) << message; + + return dur; } void Agc::prepare(Metadata *imageMetadata) @@ -306,8 +315,10 @@ void Agc::process(StatisticsPtr &stats, Metadata *imageMetadata) /* Can also happen when new channels start. 
*/ LOG(RPiAgc, Debug) << "process: channel " << channelIndex << " not seen yet"; - channelData.channel.process(stats, &deviceStatus, imageMetadata); - setChannelIndex(imageMetadata, "process: no AGC status found", channelIndex); + channelData.channel.process(stats, &deviceStatus, imageMetadata, channelResults_); + auto dur = setChannelIndex(imageMetadata, "process: no AGC status found", channelIndex); + if (dur) + channelResults_[channelIndex] = dur; /* And onto the next channel for the next call. */ index_ = (index_ + 1) % activeChannels_.size(); diff --git a/src/ipa/rpi/controller/rpi/agc.h b/src/ipa/rpi/controller/rpi/agc.h index 2eed2bab..acd6dc2f 100644 --- a/src/ipa/rpi/controller/rpi/agc.h +++ b/src/ipa/rpi/controller/rpi/agc.h @@ -54,6 +54,7 @@ private: std::vector<AgcChannelData> channelData_; std::vector<unsigned int> activeChannels_; unsigned int index_; /* index into the activeChannels_ */ + AgcChannelResults channelResults_; }; } /* namespace RPiController */ diff --git a/src/ipa/rpi/controller/rpi/agc_channel.cpp b/src/ipa/rpi/controller/rpi/agc_channel.cpp index ed8a3b30..4e60802c 100644 --- a/src/ipa/rpi/controller/rpi/agc_channel.cpp +++ b/src/ipa/rpi/controller/rpi/agc_channel.cpp @@ -175,6 +175,11 @@ readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes, int AgcChannelConstraint::read(const libcamera::YamlObject &params) { + auto channelValue = params["channel"].get<unsigned int>(); + if (!channelValue) + return -EINVAL; + channel = *channelValue; + std::string boundString = params["bound"].get<std::string>(""); transform(boundString.begin(), boundString.end(), boundString.begin(), ::toupper); @@ -184,15 +189,15 @@ int AgcChannelConstraint::read(const libcamera::YamlObject &params) } bound = boundString == "UPPER" ? 
Bound::UPPER : Bound::LOWER; - auto value = params["factor"].get<double>(); - if (!value) + auto factorValue = params["factor"].get<double>(); + if (!factorValue) return -EINVAL; - factor = *value; + factor = *factorValue; return 0; } -int readChannelConstraints(std::vector<AgcChannelConstraint> channelConstraints, +int readChannelConstraints(std::vector<AgcChannelConstraint> &channelConstraints, const libcamera::YamlObject &params) { int ret = 0; @@ -231,6 +236,7 @@ int AgcConfig::read(const libcamera::YamlObject &params) ret = readChannelConstraints(channelConstraints, params["channel_constraints"]); if (ret) return ret; + LOG(RPiAgc, Info) << "Read " << channelConstraints.size() << " channel constraints"; } ret = yTarget.read(params["y_target"]); @@ -489,7 +495,8 @@ void AgcChannel::prepare(Metadata *imageMetadata) } } -void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata) +void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, + Metadata *imageMetadata, const AgcChannelResults &channelResults) { frameCount_++; /* @@ -508,12 +515,17 @@ void AgcChannel::process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, computeTargetExposure(gain); /* The results have to be filtered so as not to change too rapidly. */ filterExposure(); + /* + * We may be asked to limit the exposure using other channels. If another channel + * determines our upper bound we may want to know this later. + */ + bool channelBound = applyChannelConstraints(channelResults); /* * Some of the exposure has to be applied as digital gain, so work out - * what that is. This function also tells us whether it's decided to - * "desaturate" the image more quickly. + * what that is. It also tells us whether it's trying to desaturate the image + * more quickly, which can only happen when another channel is not limiting us. 
*/ - bool desaturate = applyDigitalGain(gain, targetY); + bool desaturate = applyDigitalGain(gain, targetY, channelBound); /* * The last thing is to divide up the exposure value into a shutter time * and analogue gain, according to the current exposure mode. @@ -792,7 +804,49 @@ void AgcChannel::computeTargetExposure(double gain) LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure; } -bool AgcChannel::applyDigitalGain(double gain, double targetY) +bool AgcChannel::applyChannelConstraints(const AgcChannelResults &channelResults) +{ + bool channelBound = false; + LOG(RPiAgc, Debug) + << "Total exposure before channel constraints " << filtered_.totalExposure; + + for (const auto &constraint : config_.channelConstraints) { + LOG(RPiAgc, Debug) + << "Check constraint: channel " << constraint.channel << " bound " + << (constraint.bound == AgcChannelConstraint::Bound::UPPER ? "UPPER" : "LOWER") + << " factor " << constraint.factor; + if (constraint.channel >= channelResults.size() || + !channelResults[constraint.channel]) { + LOG(RPiAgc, Debug) << "no such channel - skipped"; + continue; + } + + if (channelResults[constraint.channel] == 0s) { + LOG(RPiAgc, Debug) << "zero exposure - skipped"; + continue; + } + + libcamera::utils::Duration limitExposure = + channelResults[constraint.channel] * constraint.factor; + LOG(RPiAgc, Debug) << "Limit exposure " << limitExposure; + if ((constraint.bound == AgcChannelConstraint::Bound::UPPER && + filtered_.totalExposure > limitExposure) || + (constraint.bound == AgcChannelConstraint::Bound::LOWER && + filtered_.totalExposure < limitExposure)) { + filtered_.totalExposure = limitExposure; + LOG(RPiAgc, Debug) << "Applies"; + channelBound = true; + } else + LOG(RPiAgc, Debug) << "Does not apply"; + } + + LOG(RPiAgc, Debug) + << "Total exposure after channel constraints " << filtered_.totalExposure; + + return channelBound; +} + +bool AgcChannel::applyDigitalGain(double gain, double targetY, bool channelBound) { 
double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 }); ASSERT(minColourGain != 0.0); @@ -812,8 +866,8 @@ bool AgcChannel::applyDigitalGain(double gain, double targetY) * quickly (and we then approach the correct value more quickly from * below). */ - bool desaturate = targetY > config_.fastReduceThreshold && - gain < sqrt(targetY); + bool desaturate = !channelBound && + targetY > config_.fastReduceThreshold && gain < sqrt(targetY); if (desaturate) dg /= config_.fastReduceThreshold; LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate; diff --git a/src/ipa/rpi/controller/rpi/agc_channel.h b/src/ipa/rpi/controller/rpi/agc_channel.h index 446125ef..7ce34360 100644 --- a/src/ipa/rpi/controller/rpi/agc_channel.h +++ b/src/ipa/rpi/controller/rpi/agc_channel.h @@ -19,6 +19,8 @@ namespace RPiController { +using AgcChannelResults = std::vector<libcamera::utils::Duration>; + struct AgcMeteringMode { std::vector<double> weights; int read(const libcamera::YamlObject &params); @@ -93,7 +95,8 @@ public: void disableAuto(); void switchMode(CameraMode const &cameraMode, Metadata *metadata); void prepare(Metadata *imageMetadata); - void process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata); + void process(StatisticsPtr &stats, const DeviceStatus *deviceStatus, Metadata *imageMetadata, + const AgcChannelResults &channelResults); private: bool updateLockStatus(DeviceStatus const &deviceStatus); @@ -105,7 +108,8 @@ private: double &gain, double &targetY); void computeTargetExposure(double gain); void filterExposure(); - bool applyDigitalGain(double gain, double targetY); + bool applyChannelConstraints(const AgcChannelResults &channelResults); + bool applyDigitalGain(double gain, double targetY, bool channelBound); void divideUpExposure(); void writeAndFinish(Metadata *imageMetadata, bool desaturate); libcamera::utils::Duration limitShutter(libcamera::utils::Duration shutter); diff --git 
a/src/ipa/rpi/vc4/data/imx477.json b/src/ipa/rpi/vc4/data/imx477.json index daffc268..bbe6da0d 100644 --- a/src/ipa/rpi/vc4/data/imx477.json +++ b/src/ipa/rpi/vc4/data/imx477.json @@ -515,4 +515,5 @@ "rpi.sharpen": { } } ] -} \ No newline at end of file +} +
Whenever we run Agc::process(), we store the most recent total exposure requested for each channel. With these values we can apply the channel constraints after time-filtering the requested total exposure, but before working out how much digital gain is needed. Signed-off-by: David Plowman <david.plowman@raspberrypi.com> --- src/ipa/rpi/controller/rpi/agc.cpp | 21 ++++-- src/ipa/rpi/controller/rpi/agc.h | 1 + src/ipa/rpi/controller/rpi/agc_channel.cpp | 76 ++++++++++++++++++---- src/ipa/rpi/controller/rpi/agc_channel.h | 8 ++- src/ipa/rpi/vc4/data/imx477.json | 3 +- 5 files changed, 90 insertions(+), 19 deletions(-)