Show a patch.

GET /api/1.1/patches/14261/?format=api
HTTP 200 OK
Allow: GET, PUT, PATCH, HEAD, OPTIONS
Content-Type: application/json
Vary: Accept

{
    "id": 14261,
    "url": "https://patchwork.libcamera.org/api/1.1/patches/14261/?format=api",
    "web_url": "https://patchwork.libcamera.org/patch/14261/",
    "project": {
        "id": 1,
        "url": "https://patchwork.libcamera.org/api/1.1/projects/1/?format=api",
        "name": "libcamera",
        "link_name": "libcamera",
        "list_id": "libcamera_core",
        "list_email": "libcamera-devel@lists.libcamera.org",
        "web_url": "",
        "scm_url": "",
        "webscm_url": ""
    },
    "msgid": "<20211022143907.3089419-3-naush@raspberrypi.com>",
    "date": "2021-10-22T14:39:03",
    "name": "[libcamera-devel,v2,2/6] pipeline: raspberrypi: Convert the pipeline handler to use media controller",
    "commit_ref": null,
    "pull_url": null,
    "state": "superseded",
    "archived": false,
    "hash": "99e6adca1318e166e92652b2f5b60c422f07c591",
    "submitter": {
        "id": 34,
        "url": "https://patchwork.libcamera.org/api/1.1/people/34/?format=api",
        "name": "Naushir Patuck",
        "email": "naush@raspberrypi.com"
    },
    "delegate": null,
    "mbox": "https://patchwork.libcamera.org/patch/14261/mbox/",
    "series": [
        {
            "id": 2650,
            "url": "https://patchwork.libcamera.org/api/1.1/series/2650/?format=api",
            "web_url": "https://patchwork.libcamera.org/project/libcamera/list/?series=2650",
            "date": "2021-10-22T14:39:01",
            "name": "Raspberry Pi: Conversion to media controller",
            "version": 2,
            "mbox": "https://patchwork.libcamera.org/series/2650/mbox/"
        }
    ],
    "comments": "https://patchwork.libcamera.org/api/patches/14261/comments/",
    "check": "pending",
    "checks": "https://patchwork.libcamera.org/api/patches/14261/checks/",
    "tags": {},
    "headers": {
        "Return-Path": "<libcamera-devel-bounces@lists.libcamera.org>",
        "X-Original-To": "parsemail@patchwork.libcamera.org",
        "Delivered-To": "parsemail@patchwork.libcamera.org",
        "Received": [
            "from lancelot.ideasonboard.com (lancelot.ideasonboard.com\n\t[92.243.16.209])\n\tby patchwork.libcamera.org (Postfix) with ESMTPS id 41A9FBDB1C\n\tfor <parsemail@patchwork.libcamera.org>;\n\tFri, 22 Oct 2021 14:40:18 +0000 (UTC)",
            "from lancelot.ideasonboard.com (localhost [IPv6:::1])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTP id 0461268F61;\n\tFri, 22 Oct 2021 16:40:18 +0200 (CEST)",
            "from mail-wm1-x329.google.com (mail-wm1-x329.google.com\n\t[IPv6:2a00:1450:4864:20::329])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTPS id 33D7A6012A\n\tfor <libcamera-devel@lists.libcamera.org>;\n\tFri, 22 Oct 2021 16:40:15 +0200 (CEST)",
            "by mail-wm1-x329.google.com with SMTP id j205so2443434wmj.3\n\tfor <libcamera-devel@lists.libcamera.org>;\n\tFri, 22 Oct 2021 07:40:15 -0700 (PDT)",
            "from naush-laptop.pitowers.org\n\t([2a00:1098:3142:14:f00d:ddf0:61b0:a4cd])\n\tby smtp.gmail.com with ESMTPSA id\n\tr205sm8577878wma.3.2021.10.22.07.40.13\n\t(version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256);\n\tFri, 22 Oct 2021 07:40:13 -0700 (PDT)"
        ],
        "Authentication-Results": "lancelot.ideasonboard.com;\n\tdkim=fail reason=\"signature verification failed\" (2048-bit key;\n\tunprotected) header.d=raspberrypi.com header.i=@raspberrypi.com\n\theader.b=\"GF8jST3s\"; dkim-atps=neutral",
        "DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed;\n\td=raspberrypi.com; s=google;\n\th=from:to:cc:subject:date:message-id:in-reply-to:references\n\t:mime-version:content-transfer-encoding;\n\tbh=wJpyur6FGxUGBGkl1qE65WWe1yICqyvTwggHyrkXtb8=;\n\tb=GF8jST3stXA8+VnkDBriDE5zE7JNjtPjDkzojOOyf99L7i4yTNHvgup8EP38vcXUNv\n\tsoSNPQ2Ey7U6KF25Gt4RuyYmD5E/iTXXNIw5eD1tAMXcowtpYDzd3rXwBVMNHdRoa1qq\n\t27uQBZbqQVnvw2pjgzHN/6qNSRSi+Ct/myak8sfCA3K1B7Q4pH5w/T3IOeTRbCFlS6zO\n\tMAclMvcQlI1ffAGy5DdHyZt2jdOo07G/86kV+u4Usn9B44Hk11NrZ1vy8GvppPV40jeC\n\t/FIM74Zcbo6ZM6yx2K7sFt4AGUqGZKp4VdvMoLIRKyZfLH4UXoIKQOXaH5oXyPFgWvt+\n\tBUeQ==",
        "X-Google-DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed;\n\td=1e100.net; s=20210112;\n\th=x-gm-message-state:from:to:cc:subject:date:message-id:in-reply-to\n\t:references:mime-version:content-transfer-encoding;\n\tbh=wJpyur6FGxUGBGkl1qE65WWe1yICqyvTwggHyrkXtb8=;\n\tb=K9oScpjsu6yDoPlAHi2E6ZsdBzL8OktNFkkFzDJnKw4FrYOzWXxho80ARv+GhqiV6C\n\tPYzX2O+QAGIxc3wgLai3/1ZkI24bkokFztY9JaxWkTc3rrYtYm3J/SG+aYH2YWd8rCJS\n\ttPaE4uL7Wib7r2AaATQ3OmSux5Dib0sIXIQFhtrESGlDvTiXp0DvOWtS79NlwSQRFmxe\n\tuR4RAyvxfLz3zP2bQ3sVZaUoVGZnTRF6PpSboN+zct4/3b61MMrV7fSTRGuoyuzz73Ok\n\t5iYkiwBzCo2AOMH6wUOfNYOu3oTybzYRTRu9avNJAyhPBV4oPQVpv2mwl5v/qlRGAhAl\n\tmhGg==",
        "X-Gm-Message-State": "AOAM53196zbBMGwCmFitcjlSBVqiXJ1/U2n5+/QKf5wFviItGGinBrRv\n\tW+UiwlY0EtyIM4FN/00KSJU4WaoT8428fBUE",
        "X-Google-Smtp-Source": "ABdhPJzb+TpjEkb7PRbNgfB7Hn+2hsEK2DkWetEEm+shaQofaFxN4moIDRGodx+N9yuHMtJRRPPdPQ==",
        "X-Received": "by 2002:a1c:3b44:: with SMTP id i65mr107502wma.161.1634913614133;\n\tFri, 22 Oct 2021 07:40:14 -0700 (PDT)",
        "From": "Naushir Patuck <naush@raspberrypi.com>",
        "To": "libcamera-devel@lists.libcamera.org",
        "Date": "Fri, 22 Oct 2021 15:39:03 +0100",
        "Message-Id": "<20211022143907.3089419-3-naush@raspberrypi.com>",
        "X-Mailer": "git-send-email 2.25.1",
        "In-Reply-To": "<20211022143907.3089419-1-naush@raspberrypi.com>",
        "References": "<20211022143907.3089419-1-naush@raspberrypi.com>",
        "MIME-Version": "1.0",
        "Content-Transfer-Encoding": "8bit",
        "Subject": "[libcamera-devel] [PATCH v2 2/6] pipeline: raspberrypi: Convert the\n\tpipeline handler to use media controller",
        "X-BeenThere": "libcamera-devel@lists.libcamera.org",
        "X-Mailman-Version": "2.1.29",
        "Precedence": "list",
        "List-Id": "<libcamera-devel.lists.libcamera.org>",
        "List-Unsubscribe": "<https://lists.libcamera.org/options/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=unsubscribe>",
        "List-Archive": "<https://lists.libcamera.org/pipermail/libcamera-devel/>",
        "List-Post": "<mailto:libcamera-devel@lists.libcamera.org>",
        "List-Help": "<mailto:libcamera-devel-request@lists.libcamera.org?subject=help>",
        "List-Subscribe": "<https://lists.libcamera.org/listinfo/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=subscribe>",
        "Errors-To": "libcamera-devel-bounces@lists.libcamera.org",
        "Sender": "\"libcamera-devel\" <libcamera-devel-bounces@lists.libcamera.org>"
    },
    "content": "Switch the pipeline handler to use the new Unicam media controller based driver.\nWith this change, we directly talk to the sensor device driver to set controls\nand set/get formats in the pipeline handler.\n\nThis change requires the accompanying Raspberry Pi linux kernel change at\nhttps://github.com/raspberrypi/linux/pull/4645. If this kernel change is not\npresent, the pipeline handler will fail to run with an error message informing\nthe user to update the kernel build.\n\nSigned-off-by: Naushir Patuck <naush@raspberrypi.com>\n---\n .../pipeline/raspberrypi/raspberrypi.cpp      | 146 +++++++++++-------\n 1 file changed, 90 insertions(+), 56 deletions(-)",
    "diff": "diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp\nindex 1634ca98f481..a31b0f81eba7 100644\n--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp\n+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp\n@@ -48,6 +48,29 @@ LOG_DEFINE_CATEGORY(RPI)\n \n namespace {\n \n+/* Map of mbus codes to supported sizes reported by the sensor. */\n+using SensorFormats = std::map<unsigned int, std::vector<Size>>;\n+\n+SensorFormats populateSensorFormats(std::unique_ptr<CameraSensor> &sensor)\n+{\n+\tSensorFormats formats;\n+\n+\tfor (auto const mbusCode : sensor->mbusCodes())\n+\t\tformats.emplace(mbusCode, sensor->sizes(mbusCode));\n+\n+\treturn formats;\n+}\n+\n+inline V4L2DeviceFormat toV4L2DeviceFormat(V4L2SubdeviceFormat &mode)\n+{\n+\tV4L2DeviceFormat deviceFormat;\n+\tBayerFormat bayer = BayerFormat::fromMbusCode(mode.mbus_code);\n+\n+\tdeviceFormat.fourcc = bayer.toV4L2PixelFormat();\n+\tdeviceFormat.size = mode.size;\n+\treturn deviceFormat;\n+}\n+\n bool isRaw(PixelFormat &pixFmt)\n {\n \t/*\n@@ -74,11 +97,10 @@ double scoreFormat(double desired, double actual)\n \treturn score;\n }\n \n-V4L2DeviceFormat findBestMode(V4L2VideoDevice::Formats &formatsMap,\n-\t\t\t      const Size &req)\n+V4L2SubdeviceFormat findBestMode(const SensorFormats &formatsMap, const Size &req)\n {\n \tdouble bestScore = std::numeric_limits<double>::max(), score;\n-\tV4L2DeviceFormat bestMode;\n+\tV4L2SubdeviceFormat bestMode;\n \n #define PENALTY_AR\t\t1500.0\n #define PENALTY_8BIT\t\t2000.0\n@@ -88,18 +110,17 @@ V4L2DeviceFormat findBestMode(V4L2VideoDevice::Formats &formatsMap,\n \n \t/* Calculate the closest/best mode from the user requested size. */\n \tfor (const auto &iter : formatsMap) {\n-\t\tV4L2PixelFormat v4l2Format = iter.first;\n-\t\tconst PixelFormatInfo &info = PixelFormatInfo::info(v4l2Format);\n+\t\tconst unsigned int mbus_code = iter.first;\n+\t\tconst PixelFormat format = BayerFormat::fromMbusCode(mbus_code).toPixelFormat();\n+\t\tconst PixelFormatInfo &info = PixelFormatInfo::info(format);\n \n-\t\tfor (const SizeRange &sz : iter.second) {\n-\t\t\tdouble modeWidth = sz.contains(req) ? req.width : sz.max.width;\n-\t\t\tdouble modeHeight = sz.contains(req) ? req.height : sz.max.height;\n+\t\tfor (const Size &sz : iter.second) {\n \t\t\tdouble reqAr = static_cast<double>(req.width) / req.height;\n-\t\t\tdouble modeAr = modeWidth / modeHeight;\n+\t\t\tdouble modeAr = sz.width / sz.height;\n \n \t\t\t/* Score the dimensions for closeness. */\n-\t\t\tscore = scoreFormat(req.width, modeWidth);\n-\t\t\tscore += scoreFormat(req.height, modeHeight);\n+\t\t\tscore = scoreFormat(req.width, sz.width);\n+\t\t\tscore += scoreFormat(req.height, sz.height);\n \t\t\tscore += PENALTY_AR * scoreFormat(reqAr, modeAr);\n \n \t\t\t/* Add any penalties... this is not an exact science! */\n@@ -115,12 +136,12 @@ V4L2DeviceFormat findBestMode(V4L2VideoDevice::Formats &formatsMap,\n \n \t\t\tif (score <= bestScore) {\n \t\t\t\tbestScore = score;\n-\t\t\t\tbestMode.fourcc = v4l2Format;\n-\t\t\t\tbestMode.size = Size(modeWidth, modeHeight);\n+\t\t\t\tbestMode.mbus_code = mbus_code;\n+\t\t\t\tbestMode.size = sz;\n \t\t\t}\n \n-\t\t\tLOG(RPI, Info) << \"Mode: \" << modeWidth << \"x\" << modeHeight\n-\t\t\t\t       << \" fmt \" << v4l2Format.toString()\n+\t\t\tLOG(RPI, Info) << \"Mode: \" << sz.width << \"x\" << sz.height\n+\t\t\t\t       << \" fmt \" << format.toString()\n \t\t\t\t       << \" Score: \" << score\n \t\t\t\t       << \" (best \" << bestScore << \")\";\n \t\t}\n@@ -170,6 +191,7 @@ public:\n \tstd::unique_ptr<ipa::RPi::IPAProxyRPi> ipa_;\n \n \tstd::unique_ptr<CameraSensor> sensor_;\n+\tSensorFormats sensorFormats_;\n \t/* Array of Unicam and ISP device streams and associated buffers/streams. */\n \tRPi::Device<Unicam, 2> unicam_;\n \tRPi::Device<Isp, 4> isp_;\n@@ -352,9 +374,9 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()\n \t\t\t * Calculate the best sensor mode we can use based on\n \t\t\t * the user request.\n \t\t\t */\n-\t\t\tV4L2VideoDevice::Formats fmts = data_->unicam_[Unicam::Image].dev()->formats();\n-\t\t\tV4L2DeviceFormat sensorFormat = findBestMode(fmts, cfg.size);\n-\t\t\tint ret = data_->unicam_[Unicam::Image].dev()->tryFormat(&sensorFormat);\n+\t\t\tV4L2SubdeviceFormat sensorFormat = findBestMode(data_->sensorFormats_, cfg.size);\n+\t\t\tV4L2DeviceFormat unicamFormat = toV4L2DeviceFormat(sensorFormat);\n+\t\t\tint ret = data_->unicam_[Unicam::Image].dev()->tryFormat(&unicamFormat);\n \t\t\tif (ret)\n \t\t\t\treturn Invalid;\n \n@@ -366,7 +388,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()\n \t\t\t * fetch the \"native\" (i.e. untransformed) Bayer order,\n \t\t\t * because the sensor may currently be flipped!\n \t\t\t */\n-\t\t\tV4L2PixelFormat fourcc = sensorFormat.fourcc;\n+\t\t\tV4L2PixelFormat fourcc = unicamFormat.fourcc;\n \t\t\tif (data_->flipsAlterBayerOrder_) {\n \t\t\t\tBayerFormat bayer = BayerFormat::fromV4L2PixelFormat(fourcc);\n \t\t\t\tbayer.order = data_->nativeBayerOrder_;\n@@ -375,15 +397,15 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()\n \t\t\t}\n \n \t\t\tPixelFormat sensorPixFormat = fourcc.toPixelFormat();\n-\t\t\tif (cfg.size != sensorFormat.size ||\n+\t\t\tif (cfg.size != unicamFormat.size ||\n \t\t\t    cfg.pixelFormat != sensorPixFormat) {\n-\t\t\t\tcfg.size = sensorFormat.size;\n+\t\t\t\tcfg.size = unicamFormat.size;\n \t\t\t\tcfg.pixelFormat = sensorPixFormat;\n \t\t\t\tstatus = Adjusted;\n \t\t\t}\n \n-\t\t\tcfg.stride = sensorFormat.planes[0].bpl;\n-\t\t\tcfg.frameSize = sensorFormat.planes[0].size;\n+\t\t\tcfg.stride = unicamFormat.planes[0].bpl;\n+\t\t\tcfg.frameSize = unicamFormat.planes[0].size;\n \n \t\t\trawCount++;\n \t\t} else {\n@@ -472,7 +494,8 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,\n {\n \tRPiCameraData *data = cameraData(camera);\n \tCameraConfiguration *config = new RPiCameraConfiguration(data);\n-\tV4L2DeviceFormat sensorFormat;\n+\tV4L2SubdeviceFormat sensorFormat;\n+\tV4L2DeviceFormat unicamFormat;\n \tunsigned int bufferCount;\n \tPixelFormat pixelFormat;\n \tV4L2VideoDevice::Formats fmts;\n@@ -487,9 +510,9 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,\n \t\tswitch (role) {\n \t\tcase StreamRole::Raw:\n \t\t\tsize = data->sensor_->resolution();\n-\t\t\tfmts = data->unicam_[Unicam::Image].dev()->formats();\n-\t\t\tsensorFormat = findBestMode(fmts, size);\n-\t\t\tpixelFormat = sensorFormat.fourcc.toPixelFormat();\n+\t\t\tsensorFormat = findBestMode(data->sensorFormats_, size);\n+\t\t\tunicamFormat = toV4L2DeviceFormat(sensorFormat);\n+\t\t\tpixelFormat = BayerFormat::fromMbusCode(sensorFormat.mbus_code).toPixelFormat();\n \t\t\tASSERT(pixelFormat.isValid());\n \t\t\tbufferCount = 2;\n \t\t\trawCount++;\n@@ -599,32 +622,30 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)\n \t}\n \n \t/* First calculate the best sensor mode we can use based on the user request. */\n-\tV4L2VideoDevice::Formats fmts = data->unicam_[Unicam::Image].dev()->formats();\n-\tV4L2DeviceFormat sensorFormat = findBestMode(fmts, rawStream ? sensorSize : maxSize);\n+\tV4L2SubdeviceFormat sensorFormat = findBestMode(data->sensorFormats_, rawStream ? sensorSize : maxSize);\n+\tV4L2DeviceFormat unicamFormat = toV4L2DeviceFormat(sensorFormat);\n+\n+\tret = data->sensor_->setFormat(&sensorFormat);\n+\tif (ret)\n+\t\treturn ret;\n \n \t/*\n \t * Unicam image output format. The ISP input format gets set at start,\n \t * just in case we have swapped bayer orders due to flips.\n \t */\n-\tret = data->unicam_[Unicam::Image].dev()->setFormat(&sensorFormat);\n+\tret = data->unicam_[Unicam::Image].dev()->setFormat(&unicamFormat);\n \tif (ret)\n \t\treturn ret;\n \n-\t/*\n-\t * The control ranges associated with the sensor may need updating\n-\t * after a format change.\n-\t * \\todo Use the CameraSensor::setFormat API instead.\n-\t */\n-\tdata->sensor_->updateControlInfo();\n-\n \tLOG(RPI, Info) << \"Sensor: \" << camera->id()\n-\t\t       << \" - Selected mode: \" << sensorFormat.toString();\n+\t\t       << \" - Selected sensor mode: \" << sensorFormat.toString()\n+\t\t       << \" - Selected unicam mode: \" << unicamFormat.toString();\n \n \t/*\n \t * This format may be reset on start() if the bayer order has changed\n \t * because of flips in the sensor.\n \t */\n-\tret = data->isp_[Isp::Input].dev()->setFormat(&sensorFormat);\n+\tret = data->isp_[Isp::Input].dev()->setFormat(&unicamFormat);\n \tif (ret)\n \t\treturn ret;\n \n@@ -746,8 +767,8 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)\n \tdata->ispMinCropSize_ = testCrop.size();\n \n \t/* Adjust aspect ratio by providing crops on the input image. */\n-\tSize size = sensorFormat.size.boundedToAspectRatio(maxSize);\n-\tRectangle crop = size.centeredTo(Rectangle(sensorFormat.size).center());\n+\tSize size = unicamFormat.size.boundedToAspectRatio(maxSize);\n+\tRectangle crop = size.centeredTo(Rectangle(unicamFormat.size).center());\n \tdata->ispCrop_ = crop;\n \n \tdata->isp_[Isp::Input].dev()->setSelection(V4L2_SEL_TGT_CROP, &crop);\n@@ -761,8 +782,11 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)\n \t * supports it.\n \t */\n \tif (data->sensorMetadata_) {\n-\t\tformat = {};\n+\t\tV4L2SubdeviceFormat embeddedFormat;\n+\n+\t\tdata->sensor_->device()->getFormat(1, &embeddedFormat);\n \t\tformat.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);\n+\t\tformat.planes[0].size = embeddedFormat.size.width * embeddedFormat.size.height;\n \n \t\tLOG(RPI, Debug) << \"Setting embedded data format.\";\n \t\tret = data->unicam_[Unicam::Embedded].dev()->setFormat(&format);\n@@ -847,9 +871,14 @@ int PipelineHandlerRPi::start(Camera *camera, const ControlList *controls)\n \t * IPA configure may have changed the sensor flips - hence the bayer\n \t * order. Get the sensor format and set the ISP input now.\n \t */\n-\tV4L2DeviceFormat sensorFormat;\n-\tdata->unicam_[Unicam::Image].dev()->getFormat(&sensorFormat);\n-\tret = data->isp_[Isp::Input].dev()->setFormat(&sensorFormat);\n+\tV4L2SubdeviceFormat sensorFormat;\n+\tdata->sensor_->device()->getFormat(0, &sensorFormat);\n+\n+\tV4L2DeviceFormat ispFormat;\n+\tispFormat.fourcc = BayerFormat::fromMbusCode(sensorFormat.mbus_code).toV4L2PixelFormat();\n+\tispFormat.size = sensorFormat.size;\n+\n+\tret = data->isp_[Isp::Input].dev()->setFormat(&ispFormat);\n \tif (ret) {\n \t\tstop(camera);\n \t\treturn ret;\n\t}\n@@ -1004,6 +1033,8 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)\n \tif (data->sensor_->init())\n \t\treturn false;\n \n+\tdata->sensorFormats_ = populateSensorFormats(data->sensor_);\n+\n \tipa::RPi::SensorConfig sensorConfig;\n \tif (data->loadIPA(&sensorConfig)) {\n \t\tLOG(RPI, Error) << \"Failed to load a suitable IPA library\";\n@@ -1030,6 +1061,11 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)\n \t\t\treturn false;\n \t}\n \n+\tif (!(data->unicam_[Unicam::Image].dev()->caps().device_caps() & V4L2_CAP_IO_MC)) {\n+\t\tLOG(RPI, Error) << \"Unicam driver did not advertise V4L2_CAP_IO_MC, please update your kernel!\";\n+\t\treturn false;\n+\t}\n+\n \t/*\n \t * Setup our delayed control writer with the sensor default\n \t * gain and exposure delays. Mark VBLANK for priority write.\n@@ -1039,7 +1075,7 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)\n \t\t{ V4L2_CID_EXPOSURE, { sensorConfig.exposureDelay, false } },\n \t\t{ V4L2_CID_VBLANK, { sensorConfig.vblankDelay, true } }\n \t};\n-\tdata->delayedCtrls_ = std::make_unique<DelayedControls>(data->unicam_[Unicam::Image].dev(), params);\n+\tdata->delayedCtrls_ = std::make_unique<DelayedControls>(data->sensor_->device(), params);\n \tdata->sensorMetadata_ = sensorConfig.sensorMetadata;\n \n \t/* Register the controls that the Raspberry Pi IPA can handle. */\n@@ -1066,15 +1102,14 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)\n \t * As part of answering the final question, we reset the camera to\n \t * no transform at all.\n \t */\n-\n-\tV4L2VideoDevice *dev = data->unicam_[Unicam::Image].dev();\n-\tconst struct v4l2_query_ext_ctrl *hflipCtrl = dev->controlInfo(V4L2_CID_HFLIP);\n+\tconst V4L2Subdevice *sensor = data->sensor_->device();\n+\tconst struct v4l2_query_ext_ctrl *hflipCtrl = sensor->controlInfo(V4L2_CID_HFLIP);\n \tif (hflipCtrl) {\n \t\t/* We assume it will support vflips too... */\n \t\tdata->supportsFlips_ = true;\n \t\tdata->flipsAlterBayerOrder_ = hflipCtrl->flags & V4L2_CTRL_FLAG_MODIFY_LAYOUT;\n \n-\t\tControlList ctrls(dev->controls());\n+\t\tControlList ctrls(data->sensor_->controls());\n \t\tctrls.set(V4L2_CID_HFLIP, 0);\n \t\tctrls.set(V4L2_CID_VFLIP, 0);\n \t\tdata->setSensorControls(ctrls);\n@@ -1082,9 +1117,8 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)\n \n \t/* Look for a valid Bayer format. */\n \tBayerFormat bayerFormat;\n-\tfor (const auto &iter : dev->formats()) {\n-\t\tV4L2PixelFormat v4l2Format = iter.first;\n-\t\tbayerFormat = BayerFormat::fromV4L2PixelFormat(v4l2Format);\n+\tfor (const auto &iter : data->sensorFormats_) {\n+\t\tbayerFormat = BayerFormat::fromMbusCode(iter.first);\n \t\tif (bayerFormat.isValid())\n \t\t\tbreak;\n \t}\n@@ -1271,7 +1305,7 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config)\n \t\t}\n \t}\n \n-\tentityControls.emplace(0, unicam_[Unicam::Image].dev()->controls());\n+\tentityControls.emplace(0, sensor_->controls());\n \tentityControls.emplace(1, isp_[Isp::Input].dev()->controls());\n \n \t/* Always send the user transform to the IPA. */\n@@ -1406,10 +1440,10 @@ void RPiCameraData::setSensorControls(ControlList &controls)\n \t\tControlList vblank_ctrl;\n \n \t\tvblank_ctrl.set(V4L2_CID_VBLANK, controls.get(V4L2_CID_VBLANK));\n-\t\tunicam_[Unicam::Image].dev()->setControls(&vblank_ctrl);\n+\t\tsensor_->setControls(&vblank_ctrl);\n \t}\n \n-\tunicam_[Unicam::Image].dev()->setControls(&controls);\n+\tsensor_->setControls(&controls);\n }\n \n void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)\n",
    "prefixes": [
        "libcamera-devel",
        "v2",
        "2/6"
    ]
}