From patchwork Fri Feb 10 19:37:16 2023 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Patchwork-Submitter: Pavel Machek X-Patchwork-Id: 18274 Return-Path: X-Original-To: parsemail@patchwork.libcamera.org Delivered-To: parsemail@patchwork.libcamera.org Received: from lancelot.ideasonboard.com (lancelot.ideasonboard.com [92.243.16.209]) by patchwork.libcamera.org (Postfix) with ESMTPS id 6232EBDB1C for ; Fri, 10 Feb 2023 19:37:22 +0000 (UTC) Received: from lancelot.ideasonboard.com (localhost [IPv6:::1]) by lancelot.ideasonboard.com (Postfix) with ESMTP id 8ED7D625F4; Fri, 10 Feb 2023 20:37:21 +0100 (CET) DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/simple; d=libcamera.org; s=mail; t=1676057841; bh=Yzi+DiKForZTYOGO4VYPK2M+ZpC2d69AfqgOzMzmvx4=; h=Date:To:Subject:List-Id:List-Unsubscribe:List-Archive:List-Post: List-Help:List-Subscribe:From:Reply-To:From; b=nyyPhADHnLzP56bOS1r0L5E1x4Sju+y908VfDQK82h9UqAVzr8/QQFbT9dZzuj0WH sbUBMxt4NvTfDzJLyMDT/JKE5TdKhmSBYPNSJtMxkoefP+svUvR3YNtyzCI2iQwo1t cxjhfeChhqt7an7zvSo0zTabVlRkluaZs3zz2dHxNuhyKnmB+HCxqP3Lplbt6d0ayy JNxWvpV+7PsoNqCWt6Nk6i/PTk9Vtb/QupcukrOExCx/G4Mh4p9M1oukWPP0FWC3US hvqs3b6xW0YhpV9K6VQBV38oHayVJVy46di/JWF2l7a+tkmdbcn1FTQwIeFil1erNH 76YJCVJMHy+QA== Received: from jabberwock.ucw.cz (jabberwock.ucw.cz [46.255.230.98]) by lancelot.ideasonboard.com (Postfix) with ESMTPS id DD83D625CE for ; Fri, 10 Feb 2023 20:37:19 +0100 (CET) Authentication-Results: lancelot.ideasonboard.com; dkim=pass (1024-bit key; unprotected) header.d=ucw.cz header.i=@ucw.cz header.b="Y45kRA1h"; dkim-atps=neutral Received: by jabberwock.ucw.cz (Postfix, from userid 1017) id B83FB1C0AB3; Fri, 10 Feb 2023 20:37:18 +0100 (CET) DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=ucw.cz; s=gen1; t=1676057838; h=from:from:reply-to:subject:subject:date:date:message-id:message-id: to:to:cc:mime-version:mime-version:content-type:content-type; bh=BYxLhXQ5vJOW0HJQURd4vtw9aoN0s/mMsujPHC1RcCI=; 
b=Y45kRA1h03RbljHz4zHlzdPKqA6tByMrCRDokVrUU+qWUemkrTmyU2CfVlp5mCU4cBWBIs 1oUavT+4puADmDrsNRELwO2YVTS6TcrmsEbYj6IINv5tEghTwQB+UXpBF3O0pZhU7fbDdl GGHt+IrnGCy6Q2720+MYfcqj5NiZs8c= Date: Fri, 10 Feb 2023 20:37:16 +0100 To: kieran.bingham@ideasonboard.com, libcamera-devel@lists.libcamera.org Message-ID: MIME-Version: 1.0 Content-Disposition: inline Subject: [libcamera-devel] [RFC] CPU-only auto-exposure, and where to put it X-BeenThere: libcamera-devel@lists.libcamera.org X-Mailman-Version: 2.1.29 Precedence: list List-Id: List-Unsubscribe: , List-Archive: List-Post: List-Help: List-Subscribe: , X-Patchwork-Original-From: Pavel Machek via libcamera-devel From: Pavel Machek Reply-To: Pavel Machek Errors-To: libcamera-devel-bounces@lists.libcamera.org Sender: "libcamera-devel" Hi! So I have this, which kind-of works on PinePhone and Librem 5. I started with autoexposure. AgcExposureMode and divideUpExposure are from RPi code; I'm not sure how to reuse the code. I guess I should convert statistics to histograms. I have placed my hooks in SimpleCameraData::bufferReady. Is that reasonable or is there a better place? Where should the code go? It is now in src/libcamera/pipeline/simple/simple.cpp, would something like src/libcamera/pipeline/simple/ae.cpp be suitable? Don't look at the code too much, it clearly needs... more work. 
Best regards, Pavel diff --git a/src/libcamera/pipeline/simple/simple.cpp b/src/libcamera/pipeline/simple/simple.cpp index 24ded4db..92d2e8a5 100644 --- a/src/libcamera/pipeline/simple/simple.cpp +++ b/src/libcamera/pipeline/simple/simple.cpp @@ -27,6 +28,7 @@ #include #include #include +#include #include "libcamera/internal/camera.h" #include "libcamera/internal/camera_sensor.h" @@ -36,7 +38,9 @@ #include "libcamera/internal/pipeline_handler.h" #include "libcamera/internal/v4l2_subdevice.h" #include "libcamera/internal/v4l2_videodevice.h" +#include "libcamera/internal/mapped_framebuffer.h" +using libcamera::utils::Duration; namespace libcamera { @@ -213,6 +217,7 @@ public: int setupFormats(V4L2SubdeviceFormat *format, V4L2Subdevice::Whence whence); void bufferReady(FrameBuffer *buffer); + void autoProcessing(Request *request); unsigned int streamIndex(const Stream *stream) const { @@ -724,6 +729,372 @@ int SimpleCameraData::setupFormats(V4L2SubdeviceFormat *format, return 0; } +class MappedPixels { +public: + unsigned char *data; + PixelFormat format; + Size size; + int bpp; + int maxval; + + MappedPixels(unsigned char *_data, const struct StreamConfiguration &_config) { + data = _data; + format = _config.pixelFormat; + size = _config.size; + + switch (format) { + case formats::SGRBG8: + bpp = 1; + maxval = 255; + break; + case formats::SBGGR8: + bpp = 1; + maxval = 255; + break; + case formats::SGRBG10: + bpp = 2; + maxval = 1023; + break; + default: + LOG(SimplePipeline, Error) << "Mapped pixels " << format << " is unknown format."; + } + } + + int getValue(unsigned int x, unsigned int y) { + unsigned int v; + if (x >= size.width) + x = size.width - 1; + if (y >= size.height) + y = size.height - 1; + int i = (x + size.width * y) * bpp; + v = data[i]; + if (bpp > 1) + v |= data[i+1] << 8; + return v; + } + + int getMaxValue(unsigned int x, unsigned int y) { + int v, v2; + + v = getValue(x, y); + v2 = getValue(x+1, y); + if (v2 > v) + v = v2; + v2 = getValue(x, 
y+1); + if (v2 > v) + v = v2; + v2 = getValue(x+1, y+1); + if (v2 > v) + v = v2; + return v; + } + + float getMaxValueR(float x, float y) { + float v = getMaxValue(x * size.width, y * size.height); + return v/maxval; + } + + void debugPaint(void) { + char map[] = " .,:;-+=*#"; + for (float y = 0; y < 1; y += 1/25.) { + for (float x = 0; x < 1; x += 1/80.) { + float v = getMaxValueR(x, y); + printf("%c", map[ int (v * (sizeof(map) - 2)) ]); + } + printf("\n"); + } + } +}; + +LOG_DEFINE_CATEGORY(SimpleAgc) +LOG_DEFINE_CATEGORY(RPiAgc) + +// FIXME: from src/ipa/raspberrypi/controller/rpi/agc.h +struct AgcExposureMode { + std::vector shutter; + std::vector gain; + + AgcExposureMode(void) { + libcamera::utils::Duration v1(1.0); + libcamera::utils::Duration v2(1000.0); + libcamera::utils::Duration v3(1000000.0); + shutter = { v1, v2, v2, v3 }; + gain = { 1.0, 1.0, 16.0, 16.0 }; + } +}; + + +struct AgcStatus { + libcamera::utils::Duration totalExposureValue; /* value for all exposure and gain for this image */ + libcamera::utils::Duration targetExposureValue; /* (unfiltered) target total exposure AGC is aiming for */ + libcamera::utils::Duration shutterTime; + double analogueGain; + char exposureMode[32]; + char constraintMode[32]; + char meteringMode[32]; + double ev; + libcamera::utils::Duration flickerPeriod; + int floatingRegionEnable; + libcamera::utils::Duration fixedShutter; + double fixedAnalogueGain; + double digitalGain; + int locked; +}; + +class Agc { +public: + ControlList ctrls; + + int exposure_min, exposure_max; + int again_min, again_max; + int dgain_min, dgain_max; + + AgcStatus status_; + AgcExposureMode *exposureMode_; + + libcamera::utils::Duration shutter_conv; + + struct ExposureValues { + ExposureValues(); + + libcamera::utils::Duration shutter; + double analogueGain; + libcamera::utils::Duration totalExposure; + libcamera::utils::Duration totalExposureNoDG; /* without digital gain */ + }; + + struct ExposureValues current_, filtered_; + int 
have_ad_gain; + unsigned long cid_gain; + + Agc(std::unique_ptr & sensor_) { + /* + sudo yavta -w '0x009a0901 1' /dev/v4l-subdev0 # gc2145 + sudo yavta -w '0x009a0901 1' /dev/v4l-subdev1 # ae, ov + sudo yavta -w '0x00980912 0' /dev/v4l-subdev1 # ag, ov + sudo yavta -l /dev/v4l-subdev1 + */ + have_ad_gain = 0; + if (have_ad_gain) { + ctrls = sensor_->getControls({ V4L2_CID_EXPOSURE, V4L2_CID_ANALOGUE_GAIN, V4L2_CID_DIGITAL_GAIN }); + cid_gain = V4L2_CID_ANALOGUE_GAIN; + } else { + ctrls = sensor_->getControls({ V4L2_CID_EXPOSURE, V4L2_CID_GAIN }); + cid_gain = V4L2_CID_GAIN; + } + + const ControlInfoMap &infoMap = *ctrls.infoMap(); + + const ControlInfo &exposure_info = infoMap.find(V4L2_CID_EXPOSURE)->second; + const ControlInfo &gain_info = infoMap.find(cid_gain)->second; + const ControlInfo &dgain_info = infoMap.find(V4L2_CID_DIGITAL_GAIN)->second; + + memset(&status_, 0, sizeof(status_)); + status_.ev = 1.0; + + exposureMode_ = new AgcExposureMode(); + libcamera::utils::Duration msec(1); + shutter_conv = msec; + + exposure_min = exposure_info.min().get(); + if (!exposure_min) { + LOG(SimplePipeline, Error) << "Minimum exposure is zero, that can't be linear"; + exposure_min = 1; + } + exposure_max = exposure_info.max().get(); + again_min = gain_info.min().get(); + if (!again_min) { + LOG(SimplePipeline, Error) << "Minimum gain is zero, that can't be linear"; + again_min = 100; + } + + again_max = gain_info.max().get(); + if (have_ad_gain) { + dgain_min = dgain_info.min().get(); + dgain_max = dgain_info.max().get(); + } else { + dgain_min = 1; + dgain_max = 1; + } + + printf("Exposure %d %d, gain %d %d, dgain %d %d\n", + exposure_min, exposure_max, + again_min, again_max, + dgain_min, dgain_max); + } + + void get_exposure() { + int exposure = ctrls.get(V4L2_CID_EXPOSURE).get(); + int gain = ctrls.get(cid_gain).get(); + int dgain; + if (have_ad_gain) + dgain = ctrls.get(V4L2_CID_DIGITAL_GAIN).get(); + else + dgain = 1; + + printf("Old exp %d, gain %d, dgain %d\n", 
exposure, gain, dgain); + + current_.shutter = (double) exposure * shutter_conv; + current_.analogueGain = (double) gain / again_min; + } + + void set_exposure(std::unique_ptr & sensor_) { + int exposure = (int)(filtered_.shutter / shutter_conv); + int gain = (int)(filtered_.analogueGain * again_min); + printf(" new exp %d, gain %d, dgain %d ", exposure, gain, 0); + ctrls.set(V4L2_CID_EXPOSURE, exposure); + ctrls.set(cid_gain, (int)(filtered_.analogueGain * again_min)); + if (have_ad_gain) + ctrls.set(V4L2_CID_DIGITAL_GAIN, 768); + sensor_->setControls(&ctrls); + } + + void process(std::unique_ptr & sensor_, Request *request) { + for (auto [stream, buffer] : request->buffers()) { + MappedFrameBuffer mappedBuffer(buffer, MappedFrameBuffer::MapFlag::Read); + const std::vector> &planes = mappedBuffer.planes(); + unsigned char *img = planes[0].data(); + const struct StreamConfiguration &config = stream->configuration(); + MappedPixels pixels(img, config); + + //LOG(SimplePipeline, Error) << config.pixelFormat << " " << config.size; + + pixels.debugPaint(); + + int bright = 0, too_bright = 0, total = 0; + + for (float y = 0; y < 1; y += 1/30.) { + for (float x = 0; x < 1; x += 1/40.) 
{ + float v = pixels.getMaxValueR(x, y); + + total++; + if (v > 240./255) + too_bright++; + if (v > 200./255) + bright++; + } + } + + get_exposure(); + LOG(RPiAgc, Error) << "Current values are " << current_.shutter << " and " << current_.analogueGain; + filtered_ = current_; + filtered_.totalExposureNoDG = filtered_.analogueGain * filtered_.shutter; + if ((bright / (float) total) < 0.01) { + filtered_.totalExposureNoDG *= 1.1; + printf("ADJ+"); + } + if ((too_bright / (float) total) > 0.08) { + filtered_.totalExposureNoDG *= 0.9; + printf("ADJ-"); + } + + divideUpExposure(); + set_exposure(sensor_); + //LOG(SimpleAgc, Error) << "Hello world"; + } +#if 0 + const ControlInfoMap &infoMap = controls(); + + if (infoMap.find(V4L2_CID_BRIGHTNESS) != infoMap.end()) { + //const ControlInfo &brightness = infoMap.find(V4L2_CID_BRIGHTNESS)->second; + } +#endif + } + + void divideUpExposure(); + Duration clipShutter(Duration shutter); +}; + +/* from ...agc.cpp */ +Agc::ExposureValues::ExposureValues() + : shutter(0), analogueGain(0), + totalExposure(0), totalExposureNoDG(0) +{ +} + +Duration Agc::clipShutter(Duration shutter) +{ + //if (maxShutter_) + // shutter = std::min(shutter, maxShutter_); + return shutter; +} + +void Agc::divideUpExposure() +{ + /* + * Sending the fixed shutter/gain cases through the same code may seem + * unnecessary, but it will make more sense when extend this to cover + * variable aperture. + */ + Duration exposureValue = filtered_.totalExposureNoDG; + Duration shutterTime; + double analogueGain; + shutterTime = status_.fixedShutter ? status_.fixedShutter + : exposureMode_->shutter[0]; + shutterTime = clipShutter(shutterTime); + analogueGain = status_.fixedAnalogueGain != 0.0 ? 
status_.fixedAnalogueGain + : exposureMode_->gain[0]; + if (shutterTime * analogueGain < exposureValue) { + for (unsigned int stage = 1; + stage < exposureMode_->gain.size(); stage++) { + printf("Stage %d\n", stage); + LOG(RPiAgc, Error) << "Stage " << stage << " s/g is " << shutterTime << " and " + << analogueGain; + + if (!status_.fixedShutter) { + Duration stageShutter = + clipShutter(exposureMode_->shutter[stage]); + if (stageShutter * analogueGain >= exposureValue) { + shutterTime = exposureValue / analogueGain; + break; + } + shutterTime = stageShutter; + } + if (status_.fixedAnalogueGain == 0.0) { + if (exposureMode_->gain[stage] * shutterTime >= exposureValue) { + analogueGain = exposureValue / shutterTime; + break; + } + analogueGain = exposureMode_->gain[stage]; + } + } + } + LOG(RPiAgc, Error) << "Divided up shutter and gain are " << shutterTime << " and " + << analogueGain; + /* + * Finally adjust shutter time for flicker avoidance (require both + * shutter and gain not to be fixed). + */ + if (!status_.fixedShutter && !status_.fixedAnalogueGain && + status_.flickerPeriod) { + int flickerPeriods = shutterTime / status_.flickerPeriod; + if (flickerPeriods) { + Duration newShutterTime = flickerPeriods * status_.flickerPeriod; + analogueGain *= shutterTime / newShutterTime; + /* + * We should still not allow the ag to go over the + * largest value in the exposure mode. Note that this + * may force more of the total exposure into the digital + * gain as a side-effect. 
+ */ + analogueGain = std::min(analogueGain, exposureMode_->gain.back()); + shutterTime = newShutterTime; + } + LOG(RPiAgc, Error) << "After flicker avoidance, shutter " + << shutterTime << " gain " << analogueGain; + } + filtered_.shutter = shutterTime; + filtered_.analogueGain = analogueGain; +} + + +void SimpleCameraData::autoProcessing(Request *request) +{ + Agc agc = Agc(sensor_); + + agc.process(sensor_, request); +} + void SimpleCameraData::bufferReady(FrameBuffer *buffer) { SimplePipelineHandler *pipe = SimpleCameraData::pipe(); @@ -823,8 +1197,10 @@ void SimpleCameraData::converterOutputDone(FrameBuffer *buffer) /* Complete the buffer and the request. */ Request *request = buffer->request(); - if (pipe->completeBuffer(request, buffer)) + if (pipe->completeBuffer(request, buffer)) { + autoProcessing(request); pipe->completeRequest(request); + } } /* Retrieve all source pads connected to a sink pad through active routes. */