[{"id":31450,"web_url":"https://patchwork.libcamera.org/comment/31450/","msgid":"<ef592506-68d9-4970-88f8-c874472a48f2@ideasonboard.com>","date":"2024-09-27T15:55:38","subject":"Re: [PATCH 3/4] libcamera: internal: Add MediaPipeline helper","submitter":{"id":86,"url":"https://patchwork.libcamera.org/api/people/86/","name":"Umang Jain","email":"umang.jain@ideasonboard.com"},"content":"Hi Kieran,\n\nThank you for the patch.\n\nOn 16/09/24 7:32 pm, Kieran Bingham wrote:\n> Provide a MediaPipeline class to help identifing and managing pipelines across\n> a MediaDevice graph.\n>\n> Signed-off-by: Kieran Bingham<kieran.bingham@ideasonboard.com>\n> ---\n>   include/libcamera/internal/media_pipeline.h |  60 ++++\n>   include/libcamera/internal/meson.build      |   1 +\n>   src/libcamera/media_pipeline.cpp            | 301 ++++++++++++++++++++\n>   src/libcamera/meson.build                   |   1 +\n>   4 files changed, 363 insertions(+)\n>   create mode 100644 include/libcamera/internal/media_pipeline.h\n>   create mode 100644 src/libcamera/media_pipeline.cpp\n>\n> diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h\n> new file mode 100644\n> index 000000000000..be8a611e20ca\n> --- /dev/null\n> +++ b/include/libcamera/internal/media_pipeline.h\n> @@ -0,0 +1,60 @@\n> +/* SPDX-License-Identifier: LGPL-2.1-or-later */\n> +/*\n> + * Copyright (C) 2024, Ideas on Board Oy\n> + *\n> + * Media pipeline handler\n> + */\n> +\n> +#pragma once\n> +\n> +#include <list>\n> +#include <string>\n> +\n> +#include <libcamera/base/log.h>\n> +\n> +namespace libcamera {\n> +\n> +class CameraSensor;\n> +class MediaEntity;\n> +class MediaLink;\n> +class MediaPad;\n> +struct V4L2SubdeviceFormat;\n> +\n> +class MediaPipeline\n> +{\n> +public:\n> +\tint init(MediaEntity *source, std::string sink);\n> +\tint initLinks();\n> +\tint configure(CameraSensor *sensor, V4L2SubdeviceFormat *);\n> +\n> +private:\n> +\tstruct Entity {\n> +\t\t/* The media 
entity, always valid. */\n> +\t\tMediaEntity *entity;\n> +\t\t/*\n> +\t\t * Whether or not the entity is a subdev that supports the\n> +\t\t * routing API.\n> +\t\t */\n> +\t\tbool supportsRouting;\n> +\t\t/*\n> +\t\t * The local sink pad connected to the upstream entity, null for\n> +\t\t * the camera sensor at the beginning of the pipeline.\n> +\t\t */\n> +\t\tconst MediaPad *sink;\n> +\t\t/*\n> +\t\t * The local source pad connected to the downstream entity, null\n> +\t\t * for the video node at the end of the pipeline.\n> +\t\t */\n> +\t\tconst MediaPad *source;\n> +\t\t/*\n> +\t\t * The link on the source pad, to the downstream entity, null\n> +\t\t * for the video node at the end of the pipeline.\n> +\t\t */\n> +\t\tMediaLink *sourceLink;\n> +\t};\n> +\n> +\tstd::vector<const MediaPad *> routedSourcePads(MediaPad *sink);\n> +\tstd::list<Entity> entities_;\n> +};\n> +\n> +} /* namespace libcamera */\n> diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build\n> index 1c5eef9cab80..60a35d3e0357 100644\n> --- a/include/libcamera/internal/meson.build\n> +++ b/include/libcamera/internal/meson.build\n> @@ -30,6 +30,7 @@ libcamera_internal_headers = files([\n>       'mapped_framebuffer.h',\n>       'media_device.h',\n>       'media_object.h',\n> +    'media_pipeline.h',\n>       'pipeline_handler.h',\n>       'process.h',\n>       'pub_key.h',\n> diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp\n> new file mode 100644\n> index 000000000000..e14cc1ff4773\n> --- /dev/null\n> +++ b/src/libcamera/media_pipeline.cpp\n> @@ -0,0 +1,301 @@\n> +/* SPDX-License-Identifier: LGPL-2.1-or-later */\n> +/*\n> + * Copyright (C) 2024, Ideas on Board Oy\n> + *\n> + * Media pipeline support\n> + */\n> +\n> +#include \"libcamera/internal/media_pipeline.h\"\n> +\n> +#include <algorithm>\n> +#include <errno.h>\n> +#include <queue>\n> +#include <string>\n> +#include <unordered_map>\n> +#include <unordered_set>\n> +#include 
<vector>\n> +\n> +#include <linux/media.h>\n> +\n> +#include <libcamera/base/log.h>\n> +\n> +#include \"libcamera/internal/camera_sensor.h\"\n> +#include \"libcamera/internal/media_device.h\"\n> +#include \"libcamera/internal/media_object.h\"\n> +#include \"libcamera/internal/v4l2_subdevice.h\"\n> +\n> +/**\n> + * \\file media_pipeline.h\n> + * \\brief Provide a representation of a pipeline of devices using the Media\n> + * Controller.\n> + */\n> +\n> +namespace libcamera {\n> +\n> +LOG_DEFINE_CATEGORY(MediaPipeline)\n> +\n> +/**\n> + * \\class MediaPipeline\n> + * \\brief The MediaPipeline represents a set of entities that together form a\n> + * data path for stream data.\n> + *\n> + * A MediaPipeline instance is constructed from a sink and a source between\n> + * two entities in a media graph.\n> + */\n> +\n> +/**\n> + * \\brief Retrieve all source pads connected to a sink pad through active routes\n> + *\n> + * Examine the entity using the V4L2 Subdevice Routing API to collect all the\n> + * source pads which are connected with an active route to the sink pad.\n> + *\n> + * \\return A vector of source MediaPads\n> + */\n> +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)\n> +{\n> +\tMediaEntity *entity = sink->entity();\n> +\tstd::unique_ptr<V4L2Subdevice> subdev =\n> +\t\tstd::make_unique<V4L2Subdevice>(entity);\n> +\n> +\tint ret = subdev->open();\n> +\tif (ret < 0)\n> +\t\treturn {};\n> +\n> +\tV4L2Subdevice::Routing routing = {};\n> +\tret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);\n> +\tif (ret < 0)\n> +\t\treturn {};\n> +\n> +\tstd::vector<const MediaPad *> pads;\n> +\n> +\tfor (const V4L2Subdevice::Route &route : routing) {\n> +\t\tif (sink->index() != route.sink.pad ||\n> +\t\t    !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))\n> +\t\t\tcontinue;\n> +\n> +\t\tconst MediaPad *pad = entity->getPadByIndex(route.source.pad);\n> +\t\tif (!pad) {\n> +\t\t\tLOG(MediaPipeline, Warning)\n> +\t\t\t\t<< \"Entity \" << 
entity->name()\n> +\t\t\t\t<< \" has invalid route source pad \"\n> +\t\t\t\t<< route.source.pad;\n> +\t\t}\n> +\n> +\t\tpads.push_back(pad);\n> +\t}\n> +\n> +\treturn pads;\n> +}\n> +\n> +/**\n> + * \\brief Find the path from source to sink\n> + *\n> + * Starting from a source entity, deteremine the shortest path to the\n\ns/deteremine/determine\n> + * target described by sink.\n> + *\n> + * If sink can not be found, or a route from source to sink can not be achieved\n> + * an error of -ENOLINK will be returned.\n> + *\n> + * When successful, the MediaPipeline will internally store the representation\n> + * of entities and links to describe the path between the two entities.\n> + *\n> + * \\return 0 on success, a negative errno otherwise\n> + */\n> +int MediaPipeline::init(MediaEntity *source, std::string sink)\n> +{\n> +\t/*\n> +\t * Find the shortest path between from the Camera Sensor and the\n> +\t * target entity.\n> +\t */\n> +\tstd::unordered_set<MediaEntity *> visited;\n> +\tstd::queue<std::tuple<MediaEntity *, MediaPad *>> queue;\n> +\n> +\t/* Remember at each entity where we came from. */\n> +\tstd::unordered_map<MediaEntity *, Entity> parents;\n> +\tMediaEntity *entity = nullptr;\n> +\tMediaEntity *target = nullptr;\n> +\tMediaPad *sinkPad;\n> +\n> +\tqueue.push({ source, nullptr });\n> +\n> +\twhile (!queue.empty()) {\n> +\t\tstd::tie(entity, sinkPad) = queue.front();\n> +\t\tqueue.pop();\n> +\n> +\t\t/* Found the target device. */\n> +\t\tif (entity->name() == sink) {\n> +\t\t\tLOG(MediaPipeline, Debug)\n> +\t\t\t\t<< \"Found Pipeline target \" << entity->name();\n> +\t\t\ttarget = entity;\n> +\t\t\tbreak;\n> +\t\t}\n> +\n> +\t\tvisited.insert(entity);\n> +\n> +\t\t/*\n> +\t\t * Add direct downstream entities to the search queue. 
If the\n> +\t\t * current entity supports the subdev internal routing API,\n> +\t\t * restrict the search to downstream entities reachable through\n> +\t\t * active routes.\n> +\t\t */\n> +\n> +\t\tstd::vector<const MediaPad *> pads;\n\nall these are source pads, so I would be suggest `srcPads`\n> +\t\tbool supportsRouting = false;\n> +\n> +\t\tif (sinkPad) {\n> +\t\t\tpads = routedSourcePads(sinkPad);\n> +\t\t\tif (!pads.empty())\n> +\t\t\t\tsupportsRouting = true;\n> +\t\t}\n> +\n> +\t\tif (pads.empty()) {\n> +\t\t\tfor (const MediaPad *pad : entity->pads()) {\n> +\t\t\t\tif (!(pad->flags() & MEDIA_PAD_FL_SOURCE))\n> +\t\t\t\t\tcontinue;\n> +\t\t\t\tpads.push_back(pad);\n> +\t\t\t}\n> +\t\t}\n> +\n> +\t\tfor (const MediaPad *pad : pads) {\n\nhence, I would suggest\n\n+\t\tfor (const MediaPad *srcPad : srcPads) {\n\n> +\t\t\tfor (MediaLink *link : pad->links()) {\n> +\t\t\t\tMediaEntity *next = link->sink()->entity();\n> +\t\t\t\tif (visited.find(next) == visited.end()) {\n> +\t\t\t\t\tqueue.push({ next, link->sink() });\n> +\n> +\t\t\t\t\tEntity e{ entity, supportsRouting, sinkPad, pad, link };\n> +\t\t\t\t\tparents.insert({ next, e });\n> +\t\t\t\t}\n> +\t\t\t}\n> +\t\t}\n> +\t}\n> +\n> +\tif (!target) {\n> +\t\tLOG(MediaPipeline, Error) << \"Failed to connect \" << source->name();\n> +\t\treturn -ENOLINK;\n> +\t}\n> +\n> +\t/*\n> +\t * With the parents, we can follow back our way from the capture device\n> +\t * to the sensor. 
Store all the entities in the pipeline, from the\n> +\t * camera sensor to the video node, in entities_.\n> +\t */\n> +\tentities_.push_front({ entity, false, sinkPad, nullptr, nullptr });\n> +\n> +\tfor (auto it = parents.find(entity); it != parents.end();\n> +\t     it = parents.find(entity)) {\n> +\t\tconst Entity &e = it->second;\n> +\t\tentities_.push_front(e);\n> +\t\tentity = e.entity;\n> +\t}\n> +\n> +\tLOG(MediaPipeline, Info)\n> +\t\t<< \"Found pipeline: \"\n> +\t\t<< utils::join(entities_, \" -> \",\n> +\t\t\t       [](const Entity &e) {\n> +\t\t\t\t       std::string s = \"[\";\n> +\t\t\t\t       if (e.sink)\n> +\t\t\t\t\t       s += std::to_string(e.sink->index()) + \"|\";\n> +\t\t\t\t       s += e.entity->name();\n> +\t\t\t\t       if (e.source)\n> +\t\t\t\t\t       s += \"|\" + std::to_string(e.source->index());\n> +\t\t\t\t       s += \"]\";\n> +\t\t\t\t       return s;\n> +\t\t\t       });\n> +\n> +\treturn 0;\n> +}\n> +\n> +/**\n> + * \\brief Initialise and enable all links through the MediaPipeline\n> + * \\return 0 on success, or a negative errno otherwise\n> + */\n> +int MediaPipeline::initLinks()\n> +{\n> +\tint ret = 0;\n> +\n> +\tMediaLink *sinkLink = nullptr;\n> +\tfor (Entity &e : entities_) {\n> +\t\t/* Sensor entities have no connected sink. 
*/\n> +\t\tif (!sinkLink) {\n> +\t\t\tsinkLink = e.sourceLink;\n> +\t\t\tcontinue;\n> +\t\t}\n> +\n> +\t\tLOG(MediaPipeline, Debug) << \"Enabling : \" << *sinkLink;\n\nShould this be logged inside the `if` block below, possibly after the \nsetEnabled is successful ?\n\nOtherwise, this will be logged for all sinkLink(s) which are already \nMEDIA_LNK_FL_ENABLED\n> +\n> +\t\tif (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {\n> +\t\t\tret = sinkLink->setEnabled(true);\n> +\t\t\tif (ret < 0)\n> +\t\t\t\treturn ret;\n> +\t\t}\n> +\n> +\t\tsinkLink = e.sourceLink;\n> +\t}\n> +\n> +\treturn ret;\n> +}\n> +\n> +/**\n> + * \\brief Configure the entities of this MediaPipeline\n> + *\n> + * Propagate formats through each of the entities of the Pipeline, validating\n> + * that each one was not adjusted by the driver from the desired format.\n> + *\n> + * \\return 0 on success or a negative errno otherwise\n> + */\n> +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)\n> +{\n> +\tint ret;\n> +\n> +\tfor (const Entity &e : entities_) {\n> +\t\t/* The sensor is configured through the CameraSensor */\n> +\t\tif (!e.sourceLink)\n> +\t\t\tbreak;\n> +\n> +\t\tMediaLink *link = e.sourceLink;\n> +\t\tMediaPad *source = link->source();\n> +\t\tMediaPad *sink = link->sink();\n> +\n> +\t\t/* 'format' already contains the sensor configuration */\n\nThis is only true for the first iteration, 'format' will get updated as \nyou traverse down the pipeline, no ?\n\nThis comment either needs an update or should be removed.\n> +\t\tif (source->entity() != sensor->entity()) {\n> +\t\t\t/* Todo: Add MediaDevice cache to reduce FD pressure */\n> +\t\t\tV4L2Subdevice subdev(source->entity());\n> +\t\t\tret = subdev.open();\n> +\t\t\tif (ret)\n> +\t\t\t\treturn ret;\n> +\n> +\t\t\tret = subdev.getFormat(source->index(), format);\n\nI feel a bit uncomfortable here. 
'format' is a sensor format and can get \nupdated here ?\n> +\t\t\tif (ret < 0)\n> +\t\t\t\treturn ret;\n> +\t\t}\n> +\n> +\t\tV4L2SubdeviceFormat sourceFormat = *format;\n> +\t\t/* Todo: Add MediaDevice cache to reduce FD pressure */\n> +\t\tV4L2Subdevice subdev(sink->entity());\n> +\t\tret = subdev.open();\n> +\t\tif (ret)\n> +\t\t\treturn ret;\n> +\n> +\t\tret = subdev.setFormat(sink->index(), format);\n\nand can get updated here as well ? How will it reflect for the caller of \nthe function, if the sensor format pointer gets updated here.\n\nI think the scope for 'format' should be local to this function. Both \nthe parameters passed to the function should become const.\n> +\t\tif (ret < 0)\n> +\t\t\treturn ret;\n> +\n> +\t\tif (format->code != sourceFormat.code ||\n> +\t\t    format->size != sourceFormat.size) {\n> +\t\t\tLOG(MediaPipeline, Debug)\n> +\t\t\t\t<< \"Source '\" << *source\n> +\t\t\t\t<< \" produces \" << sourceFormat\n> +\t\t\t\t<< \", sink '\" << *sink\n> +\t\t\t\t<< \" requires \" << format;\n> +\t\t\treturn -EINVAL;\n> +\t\t}\n> +\n> +\t\tLOG(MediaPipeline, Debug)\n> +\t\t\t<< \"Link \" << *link << \" configured with format \"\n> +\t\t\t<< format;\n> +\t}\n> +\n> +\treturn 0;\n> +}\n> +\n> +} /* namespace libcamera */\n> diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build\n> index aa9ab0291854..2c0f8603b231 100644\n> --- a/src/libcamera/meson.build\n> +++ b/src/libcamera/meson.build\n> @@ -41,6 +41,7 @@ libcamera_internal_sources = files([\n>       'mapped_framebuffer.cpp',\n>       'media_device.cpp',\n>       'media_object.cpp',\n> +    'media_pipeline.cpp',\n>       'pipeline_handler.cpp',\n>       'process.cpp',\n>       'pub_key.cpp',","headers":{"Return-Path":"<libcamera-devel-bounces@lists.libcamera.org>","X-Original-To":"parsemail@patchwork.libcamera.org","Delivered-To":"parsemail@patchwork.libcamera.org","Received":["from lancelot.ideasonboard.com (lancelot.ideasonboard.com\n\t[92.243.16.209])\n\tby 
patchwork.libcamera.org (Postfix) with ESMTPS id 91728C0F1B\n\tfor <parsemail@patchwork.libcamera.org>;\n\tFri, 27 Sep 2024 15:55:45 +0000 (UTC)","from lancelot.ideasonboard.com (localhost [IPv6:::1])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTP id 608616350F;\n\tFri, 27 Sep 2024 17:55:44 +0200 (CEST)","from perceval.ideasonboard.com (perceval.ideasonboard.com\n\t[IPv6:2001:4b98:dc2:55:216:3eff:fef7:d647])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTPS id 5A086634F2\n\tfor <libcamera-devel@lists.libcamera.org>;\n\tFri, 27 Sep 2024 17:55:43 +0200 (CEST)","from [IPV6:2405:201:2015:f873:c173:4b:4a04:3a21] (unknown\n\t[IPv6:2405:201:2015:f873:c173:4b:4a04:3a21])\n\tby perceval.ideasonboard.com (Postfix) with ESMTPSA id ECECC163;\n\tFri, 27 Sep 2024 17:54:13 +0200 (CEST)"],"Authentication-Results":"lancelot.ideasonboard.com; dkim=pass (1024-bit key;\n\tunprotected) header.d=ideasonboard.com header.i=@ideasonboard.com\n\theader.b=\"iWpvoqpA\"; dkim-atps=neutral","DKIM-Signature":"v=1; a=rsa-sha256; c=relaxed/simple; d=ideasonboard.com;\n\ts=mail; t=1727452454;\n\tbh=E7F/qb5PeylZ5InXtvixAqnfUpuU81utYmOgZpDQWiY=;\n\th=Date:From:Subject:To:References:In-Reply-To:From;\n\tb=iWpvoqpA31cNTRLPQ/iv0mZZmuMeM8tHwr0ieByVByGTmMZmxTrhS94Nat9csLzs+\n\txlO3fJUNo/zAQOFYPh4XyGsNdZCoSXyef/TSlygZg36edDWOoJRuaypd48rim3VZ9V\n\tJg/j44JUZZiHeSI+cye+bAqpZoKFhx2yKUiNXmLM=","Message-ID":"<ef592506-68d9-4970-88f8-c874472a48f2@ideasonboard.com>","Date":"Fri, 27 Sep 2024 21:25:38 +0530","MIME-Version":"1.0","User-Agent":"Mozilla Thunderbird","From":"Umang Jain <umang.jain@ideasonboard.com>","Subject":"Re: [PATCH 3/4] libcamera: internal: Add MediaPipeline helper","To":"Kieran Bingham <kieran.bingham@ideasonboard.com>,\n\tlibcamera devel 
<libcamera-devel@lists.libcamera.org>","References":"<20240916140241.47845-1-kieran.bingham@ideasonboard.com>\n\t<20240916140241.47845-4-kieran.bingham@ideasonboard.com>","Content-Language":"en-US","In-Reply-To":"<20240916140241.47845-4-kieran.bingham@ideasonboard.com>","Content-Type":"text/plain; charset=UTF-8; format=flowed","Content-Transfer-Encoding":"7bit","X-BeenThere":"libcamera-devel@lists.libcamera.org","X-Mailman-Version":"2.1.29","Precedence":"list","List-Id":"<libcamera-devel.lists.libcamera.org>","List-Unsubscribe":"<https://lists.libcamera.org/options/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=unsubscribe>","List-Archive":"<https://lists.libcamera.org/pipermail/libcamera-devel/>","List-Post":"<mailto:libcamera-devel@lists.libcamera.org>","List-Help":"<mailto:libcamera-devel-request@lists.libcamera.org?subject=help>","List-Subscribe":"<https://lists.libcamera.org/listinfo/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=subscribe>","Errors-To":"libcamera-devel-bounces@lists.libcamera.org","Sender":"\"libcamera-devel\" <libcamera-devel-bounces@lists.libcamera.org>"}},{"id":31456,"web_url":"https://patchwork.libcamera.org/comment/31456/","msgid":"<kkvf7kx3jufxeqlp4bqcheijx2tqpjhik3b2u4ggkc27ha4slc@c3zaiharvg4f>","date":"2024-09-27T20:40:27","subject":"Re: [PATCH 3/4] libcamera: internal: Add MediaPipeline helper","submitter":{"id":143,"url":"https://patchwork.libcamera.org/api/people/143/","name":"Jacopo Mondi","email":"jacopo.mondi@ideasonboard.com"},"content":"Hi Kieran\n\nOn Mon, Sep 16, 2024 at 04:02:40PM GMT, Kieran Bingham wrote:\n> Provide a MediaPipeline class to help identifing and managing pipelines across\n> a MediaDevice graph.\n>\n> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>\n> ---\n>  include/libcamera/internal/media_pipeline.h |  60 ++++\n>  include/libcamera/internal/meson.build      |   1 +\n>  src/libcamera/media_pipeline.cpp            | 301 
++++++++++++++++++++\n>  src/libcamera/meson.build                   |   1 +\n>  4 files changed, 363 insertions(+)\n>  create mode 100644 include/libcamera/internal/media_pipeline.h\n>  create mode 100644 src/libcamera/media_pipeline.cpp\n>\n> diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h\n> new file mode 100644\n> index 000000000000..be8a611e20ca\n> --- /dev/null\n> +++ b/include/libcamera/internal/media_pipeline.h\n> @@ -0,0 +1,60 @@\n> +/* SPDX-License-Identifier: LGPL-2.1-or-later */\n> +/*\n> + * Copyright (C) 2024, Ideas on Board Oy\n> + *\n> + * Media pipeline handler\n> + */\n> +\n> +#pragma once\n> +\n> +#include <list>\n> +#include <string>\n\nmissing <vector>\n\n> +\n> +#include <libcamera/base/log.h>\n\nWhat for ?\n\n> +\n> +namespace libcamera {\n> +\n> +class CameraSensor;\n> +class MediaEntity;\n> +class MediaLink;\n> +class MediaPad;\n> +struct V4L2SubdeviceFormat;\n> +\n> +class MediaPipeline\n> +{\n> +public:\n> +\tint init(MediaEntity *source, std::string sink);\n> +\tint initLinks();\n> +\tint configure(CameraSensor *sensor, V4L2SubdeviceFormat *);\n> +\n> +private:\n> +\tstruct Entity {\n> +\t\t/* The media entity, always valid. 
*/\n> +\t\tMediaEntity *entity;\n> +\t\t/*\n> +\t\t * Whether or not the entity is a subdev that supports the\n> +\t\t * routing API.\n> +\t\t */\n> +\t\tbool supportsRouting;\n> +\t\t/*\n> +\t\t * The local sink pad connected to the upstream entity, null for\n> +\t\t * the camera sensor at the beginning of the pipeline.\n> +\t\t */\n> +\t\tconst MediaPad *sink;\n> +\t\t/*\n> +\t\t * The local source pad connected to the downstream entity, null\n> +\t\t * for the video node at the end of the pipeline.\n> +\t\t */\n> +\t\tconst MediaPad *source;\n> +\t\t/*\n> +\t\t * The link on the source pad, to the downstream entity, null\n> +\t\t * for the video node at the end of the pipeline.\n> +\t\t */\n> +\t\tMediaLink *sourceLink;\n> +\t};\n> +\n> +\tstd::vector<const MediaPad *> routedSourcePads(MediaPad *sink);\n> +\tstd::list<Entity> entities_;\n> +};\n> +\n> +} /* namespace libcamera */\n> diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build\n> index 1c5eef9cab80..60a35d3e0357 100644\n> --- a/include/libcamera/internal/meson.build\n> +++ b/include/libcamera/internal/meson.build\n> @@ -30,6 +30,7 @@ libcamera_internal_headers = files([\n>      'mapped_framebuffer.h',\n>      'media_device.h',\n>      'media_object.h',\n> +    'media_pipeline.h',\n>      'pipeline_handler.h',\n>      'process.h',\n>      'pub_key.h',\n> diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp\n> new file mode 100644\n> index 000000000000..e14cc1ff4773\n> --- /dev/null\n> +++ b/src/libcamera/media_pipeline.cpp\n> @@ -0,0 +1,301 @@\n> +/* SPDX-License-Identifier: LGPL-2.1-or-later */\n> +/*\n> + * Copyright (C) 2024, Ideas on Board Oy\n> + *\n> + * Media pipeline support\n> + */\n> +\n> +#include \"libcamera/internal/media_pipeline.h\"\n> +\n> +#include <algorithm>\n> +#include <errno.h>\n\nmissing <memory> for unique_ptr<>\nmissing <tuple> for std::tuple and std::tie\n\n> +#include <queue>\n> +#include <string>\n\nIncluded in 
the header\n\n> +#include <unordered_map>\n> +#include <unordered_set>\n> +#include <vector>\n\nWill be included in the header\n\n> +\n> +#include <linux/media.h>\n> +\n> +#include <libcamera/base/log.h>\n> +\n> +#include \"libcamera/internal/camera_sensor.h\"\n> +#include \"libcamera/internal/media_device.h\"\n> +#include \"libcamera/internal/media_object.h\"\n> +#include \"libcamera/internal/v4l2_subdevice.h\"\n> +\n> +/**\n> + * \\file media_pipeline.h\n> + * \\brief Provide a representation of a pipeline of devices using the Media\n> + * Controller.\n\nnot '.' at the end of briefs\n\n> + */\n> +\n> +namespace libcamera {\n> +\n> +LOG_DEFINE_CATEGORY(MediaPipeline)\n> +\n> +/**\n> + * \\class MediaPipeline\n> + * \\brief The MediaPipeline represents a set of entities that together form a\n> + * data path for stream data.\n\nno '.' at the end of briefs\n\nwhat a \"stream data\" ? Should it be \"data stream\" ?\n\n> + *\n> + * A MediaPipeline instance is constructed from a sink and a source between\n> + * two entities in a media graph.\n\nDoesn't a pipeline spans for the whole media graph instead than just\nbetween two entities ?\n\n> + */\n> +\n> +/**\n> + * \\brief Retrieve all source pads connected to a sink pad through active routes\n\n\\param[in] sink\n\nI wonder why I don't get a warning from doxygen..\n\n> + *\n> + * Examine the entity using the V4L2 Subdevice Routing API to collect all the\n> + * source pads which are connected with an active route to the sink pad.\n> + *\n> + * \\return A vector of source MediaPads\n> + */\n> +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)\n> +{\n> +\tMediaEntity *entity = sink->entity();\n> +\tstd::unique_ptr<V4L2Subdevice> subdev =\n> +\t\tstd::make_unique<V4L2Subdevice>(entity);\n> +\n> +\tint ret = subdev->open();\n> +\tif (ret < 0)\n> +\t\treturn {};\n> +\n> +\tV4L2Subdevice::Routing routing = {};\n> +\tret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);\n> +\tif (ret < 
0)\n> +\t\treturn {};\n> +\n> +\tstd::vector<const MediaPad *> pads;\n> +\n> +\tfor (const V4L2Subdevice::Route &route : routing) {\n> +\t\tif (sink->index() != route.sink.pad ||\n> +\t\t    !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))\n> +\t\t\tcontinue;\n> +\n> +\t\tconst MediaPad *pad = entity->getPadByIndex(route.source.pad);\n> +\t\tif (!pad) {\n> +\t\t\tLOG(MediaPipeline, Warning)\n> +\t\t\t\t<< \"Entity \" << entity->name()\n> +\t\t\t\t<< \" has invalid route source pad \"\n> +\t\t\t\t<< route.source.pad;\n> +\t\t}\n> +\n> +\t\tpads.push_back(pad);\n> +\t}\n> +\n> +\treturn pads;\n> +}\n> +\n> +/**\n> + * \\brief Find the path from source to sink\n\nmissing \\param\n\n> + *\n> + * Starting from a source entity, deteremine the shortest path to the\n\ns/deteremine/determine\n\n> + * target described by sink.\n\ns/target described by// ?\n\n> + *\n> + * If sink can not be found, or a route from source to sink can not be achieved\n> + * an error of -ENOLINK will be returned.\n> + *\n> + * When successful, the MediaPipeline will internally store the representation\n> + * of entities and links to describe the path between the two entities.\n> + *\n> + * \\return 0 on success, a negative errno otherwise\n> + */\n> +int MediaPipeline::init(MediaEntity *source, std::string sink)\n> +{\n> +\t/*\n> +\t * Find the shortest path between from the Camera Sensor and the\n> +\t * target entity.\n\nI know where this comment comes from, but this is not a CameraSensor,\nmaybe just use \"source\" ?\n\n> +\t */\n> +\tstd::unordered_set<MediaEntity *> visited;\n> +\tstd::queue<std::tuple<MediaEntity *, MediaPad *>> queue;\n> +\n> +\t/* Remember at each entity where we came from. 
*/\n> +\tstd::unordered_map<MediaEntity *, Entity> parents;\n> +\tMediaEntity *entity = nullptr;\n> +\tMediaEntity *target = nullptr;\n> +\tMediaPad *sinkPad;\n> +\n> +\tqueue.push({ source, nullptr });\n> +\n> +\twhile (!queue.empty()) {\n> +\t\tstd::tie(entity, sinkPad) = queue.front();\n> +\t\tqueue.pop();\n> +\n> +\t\t/* Found the target device. */\n> +\t\tif (entity->name() == sink) {\n> +\t\t\tLOG(MediaPipeline, Debug)\n> +\t\t\t\t<< \"Found Pipeline target \" << entity->name();\n\ns/Pipeline/pipeline\n\n> +\t\t\ttarget = entity;\n> +\t\t\tbreak;\n> +\t\t}\n> +\n> +\t\tvisited.insert(entity);\n> +\n> +\t\t/*\n> +\t\t * Add direct downstream entities to the search queue. If the\n> +\t\t * current entity supports the subdev internal routing API,\n> +\t\t * restrict the search to downstream entities reachable through\n> +\t\t * active routes.\n> +\t\t */\n> +\n> +\t\tstd::vector<const MediaPad *> pads;\n> +\t\tbool supportsRouting = false;\n> +\n> +\t\tif (sinkPad) {\n> +\t\t\tpads = routedSourcePads(sinkPad);\n> +\t\t\tif (!pads.empty())\n> +\t\t\t\tsupportsRouting = true;\n> +\t\t}\n> +\n> +\t\tif (pads.empty()) {\n> +\t\t\tfor (const MediaPad *pad : entity->pads()) {\n> +\t\t\t\tif (!(pad->flags() & MEDIA_PAD_FL_SOURCE))\n> +\t\t\t\t\tcontinue;\n> +\t\t\t\tpads.push_back(pad);\n> +\t\t\t}\n> +\t\t}\n> +\n> +\t\tfor (const MediaPad *pad : pads) {\n> +\t\t\tfor (MediaLink *link : pad->links()) {\n> +\t\t\t\tMediaEntity *next = link->sink()->entity();\n> +\t\t\t\tif (visited.find(next) == visited.end()) {\n> +\t\t\t\t\tqueue.push({ next, link->sink() });\n> +\n> +\t\t\t\t\tEntity e{ entity, supportsRouting, sinkPad, pad, link };\n> +\t\t\t\t\tparents.insert({ next, e });\n> +\t\t\t\t}\n> +\t\t\t}\n> +\t\t}\n> +\t}\n> +\n> +\tif (!target) {\n> +\t\tLOG(MediaPipeline, Error) << \"Failed to connect \" << source->name();\n> +\t\treturn -ENOLINK;\n> +\t}\n> +\n> +\t/*\n> +\t * With the parents, we can follow back our way from the capture device\n> +\t * to the 
sensor. Store all the entities in the pipeline, from the\n> +\t * camera sensor to the video node, in entities_.\n> +\t */\n> +\tentities_.push_front({ entity, false, sinkPad, nullptr, nullptr });\n> +\n> +\tfor (auto it = parents.find(entity); it != parents.end();\n> +\t     it = parents.find(entity)) {\n> +\t\tconst Entity &e = it->second;\n> +\t\tentities_.push_front(e);\n> +\t\tentity = e.entity;\n> +\t}\n\nI'll skip review assuming this is a copy of the implementation in\nsimple.cpp\n\n> +\n> +\tLOG(MediaPipeline, Info)\n> +\t\t<< \"Found pipeline: \"\n> +\t\t<< utils::join(entities_, \" -> \",\n> +\t\t\t       [](const Entity &e) {\n> +\t\t\t\t       std::string s = \"[\";\n> +\t\t\t\t       if (e.sink)\n> +\t\t\t\t\t       s += std::to_string(e.sink->index()) + \"|\";\n> +\t\t\t\t       s += e.entity->name();\n> +\t\t\t\t       if (e.source)\n> +\t\t\t\t\t       s += \"|\" + std::to_string(e.source->index());\n> +\t\t\t\t       s += \"]\";\n> +\t\t\t\t       return s;\n> +\t\t\t       });\n> +\n> +\treturn 0;\n> +}\n> +\n> +/**\n> + * \\brief Initialise and enable all links through the MediaPipeline\n> + * \\return 0 on success, or a negative errno otherwise\n> + */\n> +int MediaPipeline::initLinks()\n> +{\n> +\tint ret = 0;\n> +\n> +\tMediaLink *sinkLink = nullptr;\n> +\tfor (Entity &e : entities_) {\n> +\t\t/* Sensor entities have no connected sink. 
*/\n> +\t\tif (!sinkLink) {\n> +\t\t\tsinkLink = e.sourceLink;\n> +\t\t\tcontinue;\n> +\t\t}\n> +\n> +\t\tLOG(MediaPipeline, Debug) << \"Enabling : \" << *sinkLink;\n> +\n> +\t\tif (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {\n> +\t\t\tret = sinkLink->setEnabled(true);\n> +\t\t\tif (ret < 0)\n> +\t\t\t\treturn ret;\n> +\t\t}\n> +\n> +\t\tsinkLink = e.sourceLink;\n> +\t}\n> +\n> +\treturn ret;\n\nI presume you can return 0 and declare ret inside the above loop\n\n> +}\n> +\n> +/**\n> + * \\brief Configure the entities of this MediaPipeline\n\nmissing \\params\n\n> + *\n> + * Propagate formats through each of the entities of the Pipeline, validating\n> + * that each one was not adjusted by the driver from the desired format.\n> + *\n> + * \\return 0 on success or a negative errno otherwise\n> + */\n> +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)\n> +{\n> +\tint ret;\n> +\n> +\tfor (const Entity &e : entities_) {\n> +\t\t/* The sensor is configured through the CameraSensor */\n> +\t\tif (!e.sourceLink)\n> +\t\t\tbreak;\n> +\n> +\t\tMediaLink *link = e.sourceLink;\n> +\t\tMediaPad *source = link->source();\n> +\t\tMediaPad *sink = link->sink();\n> +\n> +\t\t/* 'format' already contains the sensor configuration */\n> +\t\tif (source->entity() != sensor->entity()) {\n> +\t\t\t/* Todo: Add MediaDevice cache to reduce FD pressure */\n\ns/Todo/\\todo/\n\n> +\t\t\tV4L2Subdevice subdev(source->entity());\n> +\t\t\tret = subdev.open();\n\nTrying to open a subdev multiple times return -EBUSY, if this function\nis meant to be used by pipeline handlers they should be careful in\navoiding contentions.\n\n> +\t\t\tif (ret)\n> +\t\t\t\treturn ret;\n> +\n> +\t\t\tret = subdev.getFormat(source->index(), format);\n> +\t\t\tif (ret < 0)\n> +\t\t\t\treturn ret;\n> +\t\t}\n> +\n> +\t\tV4L2SubdeviceFormat sourceFormat = *format;\n> +\t\t/* Todo: Add MediaDevice cache to reduce FD pressure */\n\ns/Todo/\\todo/\n\n> +\t\tV4L2Subdevice 
subdev(sink->entity());\n> +\t\tret = subdev.open();\n> +\t\tif (ret)\n> +\t\t\treturn ret;\n> +\n> +\t\tret = subdev.setFormat(sink->index(), format);\n> +\t\tif (ret < 0)\n> +\t\t\treturn ret;\n> +\n> +\t\tif (format->code != sourceFormat.code ||\n> +\t\t    format->size != sourceFormat.size) {\n> +\t\t\tLOG(MediaPipeline, Debug)\n> +\t\t\t\t<< \"Source '\" << *source\n> +\t\t\t\t<< \" produces \" << sourceFormat\n> +\t\t\t\t<< \", sink '\" << *sink\n> +\t\t\t\t<< \" requires \" << format;\n> +\t\t\treturn -EINVAL;\n> +\t\t}\n> +\n> +\t\tLOG(MediaPipeline, Debug)\n> +\t\t\t<< \"Link \" << *link << \" configured with format \"\n> +\t\t\t<< format;\n> +\t}\n> +\n> +\treturn 0;\n> +}\n> +\n> +} /* namespace libcamera */\n> diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build\n> index aa9ab0291854..2c0f8603b231 100644\n> --- a/src/libcamera/meson.build\n> +++ b/src/libcamera/meson.build\n> @@ -41,6 +41,7 @@ libcamera_internal_sources = files([\n>      'mapped_framebuffer.cpp',\n>      'media_device.cpp',\n>      'media_object.cpp',\n> +    'media_pipeline.cpp',\n>      'pipeline_handler.cpp',\n>      'process.cpp',\n>      'pub_key.cpp',\n> --\n> 2.46.0\n>","headers":{"Return-Path":"<libcamera-devel-bounces@lists.libcamera.org>","X-Original-To":"parsemail@patchwork.libcamera.org","Delivered-To":"parsemail@patchwork.libcamera.org","Received":["from lancelot.ideasonboard.com (lancelot.ideasonboard.com\n\t[92.243.16.209])\n\tby patchwork.libcamera.org (Postfix) with ESMTPS id 84DA3C3257\n\tfor <parsemail@patchwork.libcamera.org>;\n\tFri, 27 Sep 2024 20:40:36 +0000 (UTC)","from lancelot.ideasonboard.com (localhost [IPv6:::1])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTP id 6A8566350F;\n\tFri, 27 Sep 2024 22:40:35 +0200 (CEST)","from perceval.ideasonboard.com (perceval.ideasonboard.com\n\t[IPv6:2001:4b98:dc2:55:216:3eff:fef7:d647])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTPS id 44D9C634F2\n\tfor 
<libcamera-devel@lists.libcamera.org>;\n\tFri, 27 Sep 2024 22:40:33 +0200 (CEST)","from ideasonboard.com (mob-5-91-122-74.net.vodafone.it\n\t[5.91.122.74])\n\tby perceval.ideasonboard.com (Postfix) with ESMTPSA id D5AC783D;\n\tFri, 27 Sep 2024 22:39:03 +0200 (CEST)"],"Authentication-Results":"lancelot.ideasonboard.com; dkim=pass (1024-bit key;\n\tunprotected) header.d=ideasonboard.com header.i=@ideasonboard.com\n\theader.b=\"QuVa9AAJ\"; dkim-atps=neutral","DKIM-Signature":"v=1; a=rsa-sha256; c=relaxed/simple; d=ideasonboard.com;\n\ts=mail; t=1727469544;\n\tbh=ligllbtLgRmsH7jIis0/prnxdmKHZhbHyvKQ0GkEsJI=;\n\th=Date:From:To:Cc:Subject:References:In-Reply-To:From;\n\tb=QuVa9AAJYQ92Rk/cTZST6gAOdhabZ76pAG90MKwyTe6nvkLCDUaIlTI4OQTEjHnvr\n\tuHurEuacyfI3QGx8KD3jRmuDtp2sKq0I+VCY3RmpQb7c3ojvjnEUYSXf/b8TqRI//b\n\tImlVBjEXCYH0YtbRDDcsZOYKh6ysCbWQENTbwe44=","Date":"Fri, 27 Sep 2024 22:40:27 +0200","From":"Jacopo Mondi <jacopo.mondi@ideasonboard.com>","To":"Kieran Bingham <kieran.bingham@ideasonboard.com>","Cc":"libcamera devel <libcamera-devel@lists.libcamera.org>","Subject":"Re: [PATCH 3/4] libcamera: internal: Add MediaPipeline helper","Message-ID":"<kkvf7kx3jufxeqlp4bqcheijx2tqpjhik3b2u4ggkc27ha4slc@c3zaiharvg4f>","References":"<20240916140241.47845-1-kieran.bingham@ideasonboard.com>\n\t<20240916140241.47845-4-kieran.bingham@ideasonboard.com>","MIME-Version":"1.0","Content-Type":"text/plain; 
charset=utf-8","Content-Disposition":"inline","In-Reply-To":"<20240916140241.47845-4-kieran.bingham@ideasonboard.com>","X-BeenThere":"libcamera-devel@lists.libcamera.org","X-Mailman-Version":"2.1.29","Precedence":"list","List-Id":"<libcamera-devel.lists.libcamera.org>","List-Unsubscribe":"<https://lists.libcamera.org/options/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=unsubscribe>","List-Archive":"<https://lists.libcamera.org/pipermail/libcamera-devel/>","List-Post":"<mailto:libcamera-devel@lists.libcamera.org>","List-Help":"<mailto:libcamera-devel-request@lists.libcamera.org?subject=help>","List-Subscribe":"<https://lists.libcamera.org/listinfo/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=subscribe>","Errors-To":"libcamera-devel-bounces@lists.libcamera.org","Sender":"\"libcamera-devel\" <libcamera-devel-bounces@lists.libcamera.org>"}},{"id":31529,"web_url":"https://patchwork.libcamera.org/comment/31529/","msgid":"<172788111152.532453.1715219755598871369@ping.linuxembedded.co.uk>","date":"2024-10-02T14:58:31","subject":"Re: [PATCH 3/4] libcamera: internal: Add MediaPipeline helper","submitter":{"id":4,"url":"https://patchwork.libcamera.org/api/people/4/","name":"Kieran Bingham","email":"kieran.bingham@ideasonboard.com"},"content":"Quoting Umang Jain (2024-09-27 16:55:38)\n> \n> Hi Kieran,\n> \n> Thank you for the patch.\n> \n> On 16/09/24 7:32 pm, Kieran Bingham wrote:\n> > Provide a MediaPipeline class to help identifing and managing pipelines across\n> > a MediaDevice graph.\n> >\n> > Signed-off-by: Kieran Bingham<kieran.bingham@ideasonboard.com>\n> > ---\n> >   include/libcamera/internal/media_pipeline.h |  60 ++++\n> >   include/libcamera/internal/meson.build      |   1 +\n> >   src/libcamera/media_pipeline.cpp            | 301 ++++++++++++++++++++\n> >   src/libcamera/meson.build                   |   1 +\n> >   4 files changed, 363 insertions(+)\n> >   create mode 100644 
include/libcamera/internal/media_pipeline.h\n> >   create mode 100644 src/libcamera/media_pipeline.cpp\n> >\n> > diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h\n> > new file mode 100644\n> > index 000000000000..be8a611e20ca\n> > --- /dev/null\n> > +++ b/include/libcamera/internal/media_pipeline.h\n> > @@ -0,0 +1,60 @@\n> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */\n> > +/*\n> > + * Copyright (C) 2024, Ideas on Board Oy\n> > + *\n> > + * Media pipeline handler\n> > + */\n> > +\n> > +#pragma once\n> > +\n> > +#include <list>\n> > +#include <string>\n> > +\n> > +#include <libcamera/base/log.h>\n> > +\n> > +namespace libcamera {\n> > +\n> > +class CameraSensor;\n> > +class MediaEntity;\n> > +class MediaLink;\n> > +class MediaPad;\n> > +struct V4L2SubdeviceFormat;\n> > +\n> > +class MediaPipeline\n> > +{\n> > +public:\n> > +     int init(MediaEntity *source, std::string sink);\n> > +     int initLinks();\n> > +     int configure(CameraSensor *sensor, V4L2SubdeviceFormat *);\n> > +\n> > +private:\n> > +     struct Entity {\n> > +             /* The media entity, always valid. 
*/\n> > +             MediaEntity *entity;\n> > +             /*\n> > +              * Whether or not the entity is a subdev that supports the\n> > +              * routing API.\n> > +              */\n> > +             bool supportsRouting;\n> > +             /*\n> > +              * The local sink pad connected to the upstream entity, null for\n> > +              * the camera sensor at the beginning of the pipeline.\n> > +              */\n> > +             const MediaPad *sink;\n> > +             /*\n> > +              * The local source pad connected to the downstream entity, null\n> > +              * for the video node at the end of the pipeline.\n> > +              */\n> > +             const MediaPad *source;\n> > +             /*\n> > +              * The link on the source pad, to the downstream entity, null\n> > +              * for the video node at the end of the pipeline.\n> > +              */\n> > +             MediaLink *sourceLink;\n> > +     };\n> > +\n> > +     std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);\n> > +     std::list<Entity> entities_;\n> > +};\n> > +\n> > +} /* namespace libcamera */\n> > diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build\n> > index 1c5eef9cab80..60a35d3e0357 100644\n> > --- a/include/libcamera/internal/meson.build\n> > +++ b/include/libcamera/internal/meson.build\n> > @@ -30,6 +30,7 @@ libcamera_internal_headers = files([\n> >       'mapped_framebuffer.h',\n> >       'media_device.h',\n> >       'media_object.h',\n> > +    'media_pipeline.h',\n> >       'pipeline_handler.h',\n> >       'process.h',\n> >       'pub_key.h',\n> > diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp\n> > new file mode 100644\n> > index 000000000000..e14cc1ff4773\n> > --- /dev/null\n> > +++ b/src/libcamera/media_pipeline.cpp\n> > @@ -0,0 +1,301 @@\n> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */\n> > +/*\n> > + * Copyright (C) 2024, Ideas on 
Board Oy\n> > + *\n> > + * Media pipeline support\n> > + */\n> > +\n> > +#include \"libcamera/internal/media_pipeline.h\"\n> > +\n> > +#include <algorithm>\n> > +#include <errno.h>\n> > +#include <queue>\n> > +#include <string>\n> > +#include <unordered_map>\n> > +#include <unordered_set>\n> > +#include <vector>\n> > +\n> > +#include <linux/media.h>\n> > +\n> > +#include <libcamera/base/log.h>\n> > +\n> > +#include \"libcamera/internal/camera_sensor.h\"\n> > +#include \"libcamera/internal/media_device.h\"\n> > +#include \"libcamera/internal/media_object.h\"\n> > +#include \"libcamera/internal/v4l2_subdevice.h\"\n> > +\n> > +/**\n> > + * \\file media_pipeline.h\n> > + * \\brief Provide a representation of a pipeline of devices using the Media\n> > + * Controller.\n> > + */\n> > +\n> > +namespace libcamera {\n> > +\n> > +LOG_DEFINE_CATEGORY(MediaPipeline)\n> > +\n> > +/**\n> > + * \\class MediaPipeline\n> > + * \\brief The MediaPipeline represents a set of entities that together form a\n> > + * data path for stream data.\n> > + *\n> > + * A MediaPipeline instance is constructed from a sink and a source between\n> > + * two entities in a media graph.\n> > + */\n> > +\n> > +/**\n> > + * \\brief Retrieve all source pads connected to a sink pad through active routes\n> > + *\n> > + * Examine the entity using the V4L2 Subdevice Routing API to collect all the\n> > + * source pads which are connected with an active route to the sink pad.\n> > + *\n> > + * \\return A vector of source MediaPads\n> > + */\n> > +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)\n> > +{\n> > +     MediaEntity *entity = sink->entity();\n> > +     std::unique_ptr<V4L2Subdevice> subdev =\n> > +             std::make_unique<V4L2Subdevice>(entity);\n> > +\n> > +     int ret = subdev->open();\n> > +     if (ret < 0)\n> > +             return {};\n> > +\n> > +     V4L2Subdevice::Routing routing = {};\n> > +     ret = subdev->getRouting(&routing, 
V4L2Subdevice::ActiveFormat);\n> > +     if (ret < 0)\n> > +             return {};\n> > +\n> > +     std::vector<const MediaPad *> pads;\n> > +\n> > +     for (const V4L2Subdevice::Route &route : routing) {\n> > +             if (sink->index() != route.sink.pad ||\n> > +                 !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))\n> > +                     continue;\n> > +\n> > +             const MediaPad *pad = entity->getPadByIndex(route.source.pad);\n> > +             if (!pad) {\n> > +                     LOG(MediaPipeline, Warning)\n> > +                             << \"Entity \" << entity->name()\n> > +                             << \" has invalid route source pad \"\n> > +                             << route.source.pad;\n> > +             }\n> > +\n> > +             pads.push_back(pad);\n> > +     }\n> > +\n> > +     return pads;\n> > +}\n> > +\n> > +/**\n> > + * \\brief Find the path from source to sink\n> > + *\n> > + * Starting from a source entity, deteremine the shortest path to the\n> \n> s/deteremine/determine\n\nAck.\n\n> > + * target described by sink.\n> > + *\n> > + * If sink can not be found, or a route from source to sink can not be achieved\n> > + * an error of -ENOLINK will be returned.\n> > + *\n> > + * When successful, the MediaPipeline will internally store the representation\n> > + * of entities and links to describe the path between the two entities.\n> > + *\n> > + * \\return 0 on success, a negative errno otherwise\n> > + */\n> > +int MediaPipeline::init(MediaEntity *source, std::string sink)\n> > +{\n> > +     /*\n> > +      * Find the shortest path between from the Camera Sensor and the\n> > +      * target entity.\n> > +      */\n> > +     std::unordered_set<MediaEntity *> visited;\n> > +     std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;\n> > +\n> > +     /* Remember at each entity where we came from. 
*/\n> > +     std::unordered_map<MediaEntity *, Entity> parents;\n> > +     MediaEntity *entity = nullptr;\n> > +     MediaEntity *target = nullptr;\n> > +     MediaPad *sinkPad;\n> > +\n> > +     queue.push({ source, nullptr });\n> > +\n> > +     while (!queue.empty()) {\n> > +             std::tie(entity, sinkPad) = queue.front();\n> > +             queue.pop();\n> > +\n> > +             /* Found the target device. */\n> > +             if (entity->name() == sink) {\n> > +                     LOG(MediaPipeline, Debug)\n> > +                             << \"Found Pipeline target \" << entity->name();\n> > +                     target = entity;\n> > +                     break;\n> > +             }\n> > +\n> > +             visited.insert(entity);\n> > +\n> > +             /*\n> > +              * Add direct downstream entities to the search queue. If the\n> > +              * current entity supports the subdev internal routing API,\n> > +              * restrict the search to downstream entities reachable through\n> > +              * active routes.\n> > +              */\n> > +\n> > +             std::vector<const MediaPad *> pads;\n> \n> all these are source pads, so I would be suggest `srcPads`\n\nAck\n\n> > +             bool supportsRouting = false;\n> > +\n> > +             if (sinkPad) {\n> > +                     pads = routedSourcePads(sinkPad);\n> > +                     if (!pads.empty())\n> > +                             supportsRouting = true;\n> > +             }\n> > +\n> > +             if (pads.empty()) {\n> > +                     for (const MediaPad *pad : entity->pads()) {\n> > +                             if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))\n> > +                                     continue;\n> > +                             pads.push_back(pad);\n> > +                     }\n> > +             }\n> > +\n> > +             for (const MediaPad *pad : pads) {\n> \n> hence, I would suggest\n> \n> +               for (const MediaPad 
*srcPad : srcPads) {\n\nAck\n\n> > +                     for (MediaLink *link : pad->links()) {\n> > +                             MediaEntity *next = link->sink()->entity();\n> > +                             if (visited.find(next) == visited.end()) {\n> > +                                     queue.push({ next, link->sink() });\n> > +\n> > +                                     Entity e{ entity, supportsRouting, sinkPad, pad, link };\n> > +                                     parents.insert({ next, e });\n> > +                             }\n> > +                     }\n> > +             }\n> > +     }\n> > +\n> > +     if (!target) {\n> > +             LOG(MediaPipeline, Error) << \"Failed to connect \" << source->name();\n> > +             return -ENOLINK;\n> > +     }\n> > +\n> > +     /*\n> > +      * With the parents, we can follow back our way from the capture device\n> > +      * to the sensor. Store all the entities in the pipeline, from the\n> > +      * camera sensor to the video node, in entities_.\n> > +      */\n> > +     entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });\n> > +\n> > +     for (auto it = parents.find(entity); it != parents.end();\n> > +          it = parents.find(entity)) {\n> > +             const Entity &e = it->second;\n> > +             entities_.push_front(e);\n> > +             entity = e.entity;\n> > +     }\n> > +\n> > +     LOG(MediaPipeline, Info)\n> > +             << \"Found pipeline: \"\n> > +             << utils::join(entities_, \" -> \",\n> > +                            [](const Entity &e) {\n> > +                                    std::string s = \"[\";\n> > +                                    if (e.sink)\n> > +                                            s += std::to_string(e.sink->index()) + \"|\";\n> > +                                    s += e.entity->name();\n> > +                                    if (e.source)\n> > +                                            s += \"|\" + 
std::to_string(e.source->index());\n> > +                                    s += \"]\";\n> > +                                    return s;\n> > +                            });\n> > +\n> > +     return 0;\n> > +}\n> > +\n> > +/**\n> > + * \\brief Initialise and enable all links through the MediaPipeline\n> > + * \\return 0 on success, or a negative errno otherwise\n> > + */\n> > +int MediaPipeline::initLinks()\n> > +{\n> > +     int ret = 0;\n> > +\n> > +     MediaLink *sinkLink = nullptr;\n> > +     for (Entity &e : entities_) {\n> > +             /* Sensor entities have no connected sink. */\n> > +             if (!sinkLink) {\n> > +                     sinkLink = e.sourceLink;\n> > +                     continue;\n> > +             }\n> > +\n> > +             LOG(MediaPipeline, Debug) << \"Enabling : \" << *sinkLink;\n> \n> Should this be logged inside the `if` block below, possibly after the \n> setEnabled is successful ?\n> \n> Otherwise, this will be logged for all sinkLink(s) which are already \n> MEDIA_LNK_FL_ENABLED\n\nI think it's ok to say the report on debug anyway, but I agree it's more\nclear to only report links that it's actually making a change on.\n\nI've moved it.\n\n> > +\n> > +             if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {\n> > +                     ret = sinkLink->setEnabled(true);\n> > +                     if (ret < 0)\n> > +                             return ret;\n> > +             }\n> > +\n> > +             sinkLink = e.sourceLink;\n> > +     }\n> > +\n> > +     return ret;\n> > +}\n> > +\n> > +/**\n> > + * \\brief Configure the entities of this MediaPipeline\n> > + *\n> > + * Propagate formats through each of the entities of the Pipeline, validating\n> > + * that each one was not adjusted by the driver from the desired format.\n> > + *\n> > + * \\return 0 on success or a negative errno otherwise\n> > + */\n> > +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)\n> > +{\n> > +     int 
ret;\n> > +\n> > +     for (const Entity &e : entities_) {\n> > +             /* The sensor is configured through the CameraSensor */\n> > +             if (!e.sourceLink)\n> > +                     break;\n> > +\n> > +             MediaLink *link = e.sourceLink;\n> > +             MediaPad *source = link->source();\n> > +             MediaPad *sink = link->sink();\n> > +\n> > +             /* 'format' already contains the sensor configuration */\n> \n> This is only true for the first iteration, 'format' will get updated as \n> you traverse down the pipeline, no ?\n> \n> This comment either needs an update or should be removed.\n\nCorrect - it's only on the first iteration.\n\n> > +             if (source->entity() != sensor->entity()) {\n> > +                     /* Todo: Add MediaDevice cache to reduce FD pressure */\n> > +                     V4L2Subdevice subdev(source->entity());\n> > +                     ret = subdev.open();\n> > +                     if (ret)\n> > +                             return ret;\n> > +\n> > +                     ret = subdev.getFormat(source->index(), format);\n> \n> I feel a bit uncomfortable here. 
'format' is a sensor format and can get \n> updated here ?\n\nIt's the format that we're propogating forwards through the pipeline.\n\nIt's ok for it to get changed, and desirable that it should tell the\ncaller what it finished upon at the end.\n\nWe can't return the final format configured on the end of the Pipeline\nas we use the return value for errors.\n\n\n> > +                     if (ret < 0)\n> > +                             return ret;\n> > +             }\n> > +\n> > +             V4L2SubdeviceFormat sourceFormat = *format;\n> > +             /* Todo: Add MediaDevice cache to reduce FD pressure */\n> > +             V4L2Subdevice subdev(sink->entity());\n> > +             ret = subdev.open();\n> > +             if (ret)\n> > +                     return ret;\n> > +\n> > +             ret = subdev.setFormat(sink->index(), format);\n> \n> and can get updated here as well ? How will it reflect for the caller of \n> the function, if the sensor format pointer gets updated here.\n> \n> I think the scope for 'format' should be local to this function. 
Both \n> the parameters passed to the function should become const.\n\nThe purpose of this is to help with format propogation from the start of\na pipeline to the end, so it's intentional.\n\nI'll see if I can make that clearer in the function documentation or\ncommit messages or class documentation.\n\n\n\n> > +             if (ret < 0)\n> > +                     return ret;\n> > +\n> > +             if (format->code != sourceFormat.code ||\n> > +                 format->size != sourceFormat.size) {\n> > +                     LOG(MediaPipeline, Debug)\n> > +                             << \"Source '\" << *source\n> > +                             << \" produces \" << sourceFormat\n> > +                             << \", sink '\" << *sink\n> > +                             << \" requires \" << format;\n> > +                     return -EINVAL;\n> > +             }\n> > +\n> > +             LOG(MediaPipeline, Debug)\n> > +                     << \"Link \" << *link << \" configured with format \"\n> > +                     << format;\n> > +     }\n> > +\n> > +     return 0;\n> > +}\n> > +\n> > +} /* namespace libcamera */\n> > diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build\n> > index aa9ab0291854..2c0f8603b231 100644\n> > --- a/src/libcamera/meson.build\n> > +++ b/src/libcamera/meson.build\n> > @@ -41,6 +41,7 @@ libcamera_internal_sources = files([\n> >       'mapped_framebuffer.cpp',\n> >       'media_device.cpp',\n> >       'media_object.cpp',\n> > +    'media_pipeline.cpp',\n> >       'pipeline_handler.cpp',\n> >       'process.cpp',\n> >       'pub_key.cpp',\n>","headers":{"Return-Path":"<libcamera-devel-bounces@lists.libcamera.org>","X-Original-To":"parsemail@patchwork.libcamera.org","Delivered-To":"parsemail@patchwork.libcamera.org","Received":["from lancelot.ideasonboard.com (lancelot.ideasonboard.com\n\t[92.243.16.209])\n\tby patchwork.libcamera.org (Postfix) with ESMTPS id 22918BD80A\n\tfor <parsemail@patchwork.libcamera.org>;\n\tWed,  2 
Oct 2024 14:58:38 +0000 (UTC)","from lancelot.ideasonboard.com (localhost [IPv6:::1])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTP id E8DE763510;\n\tWed,  2 Oct 2024 16:58:36 +0200 (CEST)","from perceval.ideasonboard.com (perceval.ideasonboard.com\n\t[IPv6:2001:4b98:dc2:55:216:3eff:fef7:d647])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTPS id 590F363510\n\tfor <libcamera-devel@lists.libcamera.org>;\n\tWed,  2 Oct 2024 16:58:35 +0200 (CEST)","from pendragon.ideasonboard.com\n\t(cpc89244-aztw30-2-0-cust6594.18-1.cable.virginm.net [86.31.185.195])\n\tby perceval.ideasonboard.com (Postfix) with ESMTPSA id 6A3BA5A5;\n\tWed,  2 Oct 2024 16:57:02 +0200 (CEST)"],"Authentication-Results":"lancelot.ideasonboard.com; dkim=pass (1024-bit key;\n\tunprotected) header.d=ideasonboard.com header.i=@ideasonboard.com\n\theader.b=\"EVML28HX\"; dkim-atps=neutral","DKIM-Signature":"v=1; a=rsa-sha256; c=relaxed/simple; d=ideasonboard.com;\n\ts=mail; t=1727881022;\n\tbh=f+MNMgP107FfoXVjENOt1orYsNdwluouhR70SvHJ9fk=;\n\th=In-Reply-To:References:Subject:From:To:Date:From;\n\tb=EVML28HX7Z0axfrmPvyptjQ9+8cko3VYHDzCt/2ziOOj9Wk/IEGa+ydq4APavSQfR\n\tJBYT0rtuIBwznzWc+k1fgXRzHbeeY8bPYIQiFUA4mJ3GoL8h1yFuA9T+rK+1ITaoFG\n\tLcIlY0BqPSnCtYhQpzNVhW9yq0ZCWOajAFZANqs4=","Content-Type":"text/plain; charset=\"utf-8\"","MIME-Version":"1.0","Content-Transfer-Encoding":"quoted-printable","In-Reply-To":"<ef592506-68d9-4970-88f8-c874472a48f2@ideasonboard.com>","References":"<20240916140241.47845-1-kieran.bingham@ideasonboard.com>\n\t<20240916140241.47845-4-kieran.bingham@ideasonboard.com>\n\t<ef592506-68d9-4970-88f8-c874472a48f2@ideasonboard.com>","Subject":"Re: [PATCH 3/4] libcamera: internal: Add MediaPipeline helper","From":"Kieran Bingham <kieran.bingham@ideasonboard.com>","To":"Umang Jain <umang.jain@ideasonboard.com>,\n\tlibcamera devel <libcamera-devel@lists.libcamera.org>","Date":"Wed, 02 Oct 2024 15:58:31 
+0100","Message-ID":"<172788111152.532453.1715219755598871369@ping.linuxembedded.co.uk>","User-Agent":"alot/0.10","X-BeenThere":"libcamera-devel@lists.libcamera.org","X-Mailman-Version":"2.1.29","Precedence":"list","List-Id":"<libcamera-devel.lists.libcamera.org>","List-Unsubscribe":"<https://lists.libcamera.org/options/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=unsubscribe>","List-Archive":"<https://lists.libcamera.org/pipermail/libcamera-devel/>","List-Post":"<mailto:libcamera-devel@lists.libcamera.org>","List-Help":"<mailto:libcamera-devel-request@lists.libcamera.org?subject=help>","List-Subscribe":"<https://lists.libcamera.org/listinfo/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=subscribe>","Errors-To":"libcamera-devel-bounces@lists.libcamera.org","Sender":"\"libcamera-devel\" <libcamera-devel-bounces@lists.libcamera.org>"}},{"id":31568,"web_url":"https://patchwork.libcamera.org/comment/31568/","msgid":"<172796886726.532453.10591206661391731330@ping.linuxembedded.co.uk>","date":"2024-10-03T15:21:07","subject":"Re: [PATCH 3/4] libcamera: internal: Add MediaPipeline helper","submitter":{"id":4,"url":"https://patchwork.libcamera.org/api/people/4/","name":"Kieran Bingham","email":"kieran.bingham@ideasonboard.com"},"content":"I just found this unsent response which I wrote while updating the patch\nfor v2.\n\nI've already sent v2, but now sending this (late) for posterity... and\nto perhaps pretend that I did answer questions before I sent v2 ... 
:-)\n\n--\nKieran\n\nQuoting Jacopo Mondi (2024-09-27 21:40:27)\n> Hi Kieran\n> \n> On Mon, Sep 16, 2024 at 04:02:40PM GMT, Kieran Bingham wrote:\n> > Provide a MediaPipeline class to help identifing and managing pipelines across\n> > a MediaDevice graph.\n> >\n> > Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>\n> > ---\n> >  include/libcamera/internal/media_pipeline.h |  60 ++++\n> >  include/libcamera/internal/meson.build      |   1 +\n> >  src/libcamera/media_pipeline.cpp            | 301 ++++++++++++++++++++\n> >  src/libcamera/meson.build                   |   1 +\n> >  4 files changed, 363 insertions(+)\n> >  create mode 100644 include/libcamera/internal/media_pipeline.h\n> >  create mode 100644 src/libcamera/media_pipeline.cpp\n> >\n> > diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h\n> > new file mode 100644\n> > index 000000000000..be8a611e20ca\n> > --- /dev/null\n> > +++ b/include/libcamera/internal/media_pipeline.h\n> > @@ -0,0 +1,60 @@\n> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */\n> > +/*\n> > + * Copyright (C) 2024, Ideas on Board Oy\n> > + *\n> > + * Media pipeline handler\n> > + */\n> > +\n> > +#pragma once\n> > +\n> > +#include <list>\n> > +#include <string>\n> \n> missing <vector>\n> \n\nAdded,\n\n> > +\n> > +#include <libcamera/base/log.h>\n> \n> What for ?\n\nMoved\n\n> > +\n> > +namespace libcamera {\n> > +\n> > +class CameraSensor;\n> > +class MediaEntity;\n> > +class MediaLink;\n> > +class MediaPad;\n> > +struct V4L2SubdeviceFormat;\n> > +\n> > +class MediaPipeline\n> > +{\n> > +public:\n> > +     int init(MediaEntity *source, std::string sink);\n> > +     int initLinks();\n> > +     int configure(CameraSensor *sensor, V4L2SubdeviceFormat *);\n> > +\n> > +private:\n> > +     struct Entity {\n> > +             /* The media entity, always valid. 
*/\n> > +             MediaEntity *entity;\n> > +             /*\n> > +              * Whether or not the entity is a subdev that supports the\n> > +              * routing API.\n> > +              */\n> > +             bool supportsRouting;\n> > +             /*\n> > +              * The local sink pad connected to the upstream entity, null for\n> > +              * the camera sensor at the beginning of the pipeline.\n> > +              */\n> > +             const MediaPad *sink;\n> > +             /*\n> > +              * The local source pad connected to the downstream entity, null\n> > +              * for the video node at the end of the pipeline.\n> > +              */\n> > +             const MediaPad *source;\n> > +             /*\n> > +              * The link on the source pad, to the downstream entity, null\n> > +              * for the video node at the end of the pipeline.\n> > +              */\n> > +             MediaLink *sourceLink;\n> > +     };\n> > +\n> > +     std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);\n> > +     std::list<Entity> entities_;\n> > +};\n> > +\n> > +} /* namespace libcamera */\n> > diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build\n> > index 1c5eef9cab80..60a35d3e0357 100644\n> > --- a/include/libcamera/internal/meson.build\n> > +++ b/include/libcamera/internal/meson.build\n> > @@ -30,6 +30,7 @@ libcamera_internal_headers = files([\n> >      'mapped_framebuffer.h',\n> >      'media_device.h',\n> >      'media_object.h',\n> > +    'media_pipeline.h',\n> >      'pipeline_handler.h',\n> >      'process.h',\n> >      'pub_key.h',\n> > diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp\n> > new file mode 100644\n> > index 000000000000..e14cc1ff4773\n> > --- /dev/null\n> > +++ b/src/libcamera/media_pipeline.cpp\n> > @@ -0,0 +1,301 @@\n> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */\n> > +/*\n> > + * Copyright (C) 2024, Ideas on Board 
Oy\n> > + *\n> > + * Media pipeline support\n> > + */\n> > +\n> > +#include \"libcamera/internal/media_pipeline.h\"\n> > +\n> > +#include <algorithm>\n> > +#include <errno.h>\n> \n> missing <memory> for unique_ptr<>\n> missing <tuple> for std::tuple and std::tie\n> \n> > +#include <queue>\n> > +#include <string>\n> \n> Included in the header\n> \n> > +#include <unordered_map>\n> > +#include <unordered_set>\n> > +#include <vector>\n> \n> Will be included in the header\n> \n> > +\n> > +#include <linux/media.h>\n> > +\n> > +#include <libcamera/base/log.h>\n> > +\n> > +#include \"libcamera/internal/camera_sensor.h\"\n> > +#include \"libcamera/internal/media_device.h\"\n> > +#include \"libcamera/internal/media_object.h\"\n> > +#include \"libcamera/internal/v4l2_subdevice.h\"\n> > +\n> > +/**\n> > + * \\file media_pipeline.h\n> > + * \\brief Provide a representation of a pipeline of devices using the Media\n> > + * Controller.\n> \n> not '.' at the end of briefs\n> \n> > + */\n> > +\n> > +namespace libcamera {\n> > +\n> > +LOG_DEFINE_CATEGORY(MediaPipeline)\n> > +\n> > +/**\n> > + * \\class MediaPipeline\n> > + * \\brief The MediaPipeline represents a set of entities that together form a\n> > + * data path for stream data.\n> \n> no '.' at the end of briefs\n\nAbove all handled.\n\n> \n> what a \"stream data\" ? Should it be \"data stream\" ?\n\nHrm ... that would be a data path for a data stream?\n\nPerhaps it could be \"form a path for a data stream\"?\n\nBut neither of those sound better to me :-(\n\nIn my head - it's still the 'data-path' for 'stream-data'...\n\nWould hyphens help ?\n\n\n> > + *\n> > + * A MediaPipeline instance is constructed from a sink and a source between\n> > + * two entities in a media graph.\n> \n> Doesn't a pipeline spans for the whole media graph instead than just\n> between two entities ?\n\nNo - this can be constructed between any two (connected) points. 
It\ndoesn't have to be the full graph.\n\nFor instance, in the RKISP1 you'll see that the MediaPipeline is only\nused for everything from the Sensor *up to* the CSI2 receiver. After\nthat - the RKISP1 pipeline handler manages the rest directly.\n\nBut this component handles all of the possible input paths even if we\nwere to add in 5 video mux devices in a long chain, or add 4 cameras to\na single CSI2 receiver through the Arducam 4 camera multiplexor for\ninstance.\n\n\n> \n> > + */\n> > +\n> > +/**\n> > + * \\brief Retrieve all source pads connected to a sink pad through active routes\n> \n> \\param[in] sink\n> \n> I wonder why I don't get a warning from doxygen..\n\nHrm ... is it not being built by doxy ?\n\nLooks like I've missed params all the way through.\n\n\n> > + *\n> > + * Examine the entity using the V4L2 Subdevice Routing API to collect all the\n> > + * source pads which are connected with an active route to the sink pad.\n> > + *\n> > + * \\return A vector of source MediaPads\n> > + */\n> > +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)\n> > +{\n> > +     MediaEntity *entity = sink->entity();\n> > +     std::unique_ptr<V4L2Subdevice> subdev =\n> > +             std::make_unique<V4L2Subdevice>(entity);\n> > +\n> > +     int ret = subdev->open();\n> > +     if (ret < 0)\n> > +             return {};\n> > +\n> > +     V4L2Subdevice::Routing routing = {};\n> > +     ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);\n> > +     if (ret < 0)\n> > +             return {};\n> > +\n> > +     std::vector<const MediaPad *> pads;\n> > +\n> > +     for (const V4L2Subdevice::Route &route : routing) {\n> > +             if (sink->index() != route.sink.pad ||\n> > +                 !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))\n> > +                     continue;\n> > +\n> > +             const MediaPad *pad = entity->getPadByIndex(route.source.pad);\n> > +             if (!pad) {\n> > +                     
LOG(MediaPipeline, Warning)\n> > +                             << \"Entity \" << entity->name()\n> > +                             << \" has invalid route source pad \"\n> > +                             << route.source.pad;\n> > +             }\n> > +\n> > +             pads.push_back(pad);\n> > +     }\n> > +\n> > +     return pads;\n> > +}\n> > +\n> > +/**\n> > + * \\brief Find the path from source to sink\n> \n> missing \\param\n> \n> > + *\n> > + * Starting from a source entity, deteremine the shortest path to the\n> \n> s/deteremine/determine\n> \n> > + * target described by sink.\n> \n> s/target described by// ?\n\n'sink' is a string name, so it's only the description it's not the\nentity.\n\n> \n> > + *\n> > + * If sink can not be found, or a route from source to sink can not be achieved\n> > + * an error of -ENOLINK will be returned.\n> > + *\n> > + * When successful, the MediaPipeline will internally store the representation\n> > + * of entities and links to describe the path between the two entities.\n> > + *\n> > + * \\return 0 on success, a negative errno otherwise\n> > + */\n> > +int MediaPipeline::init(MediaEntity *source, std::string sink)\n> > +{\n> > +     /*\n> > +      * Find the shortest path between from the Camera Sensor and the\n> > +      * target entity.\n> \n> I know where this comment comes from, but this is not a CameraSensor,\n> maybe just use \"source\" ?\n\nAck.\n\n> \n> > +      */\n> > +     std::unordered_set<MediaEntity *> visited;\n> > +     std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;\n> > +\n> > +     /* Remember at each entity where we came from. 
*/\n> > +     std::unordered_map<MediaEntity *, Entity> parents;\n> > +     MediaEntity *entity = nullptr;\n> > +     MediaEntity *target = nullptr;\n> > +     MediaPad *sinkPad;\n> > +\n> > +     queue.push({ source, nullptr });\n> > +\n> > +     while (!queue.empty()) {\n> > +             std::tie(entity, sinkPad) = queue.front();\n> > +             queue.pop();\n> > +\n> > +             /* Found the target device. */\n> > +             if (entity->name() == sink) {\n> > +                     LOG(MediaPipeline, Debug)\n> > +                             << \"Found Pipeline target \" << entity->name();\n> \n> s/Pipeline/pipeline\n> \n> > +                     target = entity;\n> > +                     break;\n> > +             }\n> > +\n> > +             visited.insert(entity);\n> > +\n> > +             /*\n> > +              * Add direct downstream entities to the search queue. If the\n> > +              * current entity supports the subdev internal routing API,\n> > +              * restrict the search to downstream entities reachable through\n> > +              * active routes.\n> > +              */\n> > +\n> > +             std::vector<const MediaPad *> pads;\n> > +             bool supportsRouting = false;\n> > +\n> > +             if (sinkPad) {\n> > +                     pads = routedSourcePads(sinkPad);\n> > +                     if (!pads.empty())\n> > +                             supportsRouting = true;\n> > +             }\n> > +\n> > +             if (pads.empty()) {\n> > +                     for (const MediaPad *pad : entity->pads()) {\n> > +                             if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))\n> > +                                     continue;\n> > +                             pads.push_back(pad);\n> > +                     }\n> > +             }\n> > +\n> > +             for (const MediaPad *pad : pads) {\n> > +                     for (MediaLink *link : pad->links()) {\n> > +                             MediaEntity *next = 
link->sink()->entity();\n> > +                             if (visited.find(next) == visited.end()) {\n> > +                                     queue.push({ next, link->sink() });\n> > +\n> > +                                     Entity e{ entity, supportsRouting, sinkPad, pad, link };\n> > +                                     parents.insert({ next, e });\n> > +                             }\n> > +                     }\n> > +             }\n> > +     }\n> > +\n> > +     if (!target) {\n> > +             LOG(MediaPipeline, Error) << \"Failed to connect \" << source->name();\n> > +             return -ENOLINK;\n> > +     }\n> > +\n> > +     /*\n> > +      * With the parents, we can follow back our way from the capture device\n> > +      * to the sensor. Store all the entities in the pipeline, from the\n> > +      * camera sensor to the video node, in entities_.\n> > +      */\n> > +     entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });\n> > +\n> > +     for (auto it = parents.find(entity); it != parents.end();\n> > +          it = parents.find(entity)) {\n> > +             const Entity &e = it->second;\n> > +             entities_.push_front(e);\n> > +             entity = e.entity;\n> > +     }\n> \n> I'll skip review assuming this is a copy of the implementation in\n> simple.cpp\n\nYes, I also plan/hope to replace the simple.cpp implementation by being\nable to call the MediaPipeline helpers - but converting that is a bit\nmore involved than just adding support to the RKISP1 for the moment.\n\n> > +\n> > +     LOG(MediaPipeline, Info)\n> > +             << \"Found pipeline: \"\n> > +             << utils::join(entities_, \" -> \",\n> > +                            [](const Entity &e) {\n> > +                                    std::string s = \"[\";\n> > +                                    if (e.sink)\n> > +                                            s += std::to_string(e.sink->index()) + \"|\";\n> > +                                    s += 
e.entity->name();\n> > +                                    if (e.source)\n> > +                                            s += \"|\" + std::to_string(e.source->index());\n> > +                                    s += \"]\";\n> > +                                    return s;\n> > +                            });\n> > +\n> > +     return 0;\n> > +}\n> > +\n> > +/**\n> > + * \\brief Initialise and enable all links through the MediaPipeline\n> > + * \\return 0 on success, or a negative errno otherwise\n> > + */\n> > +int MediaPipeline::initLinks()\n> > +{\n> > +     int ret = 0;\n> > +\n> > +     MediaLink *sinkLink = nullptr;\n> > +     for (Entity &e : entities_) {\n> > +             /* Sensor entities have no connected sink. */\n> > +             if (!sinkLink) {\n> > +                     sinkLink = e.sourceLink;\n> > +                     continue;\n> > +             }\n> > +\n> > +             LOG(MediaPipeline, Debug) << \"Enabling : \" << *sinkLink;\n> > +\n> > +             if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {\n> > +                     ret = sinkLink->setEnabled(true);\n> > +                     if (ret < 0)\n> > +                             return ret;\n> > +             }\n> > +\n> > +             sinkLink = e.sourceLink;\n> > +     }\n> > +\n> > +     return ret;\n> \n> I presume you can return 0 and declare ret inside the above loop\n\nack\n\n> \n> > +}\n> > +\n> > +/**\n> > + * \\brief Configure the entities of this MediaPipeline\n> \n> missing \\params\n> \n> > + *\n> > + * Propagate formats through each of the entities of the Pipeline, validating\n> > + * that each one was not adjusted by the driver from the desired format.\n> > + *\n> > + * \\return 0 on success or a negative errno otherwise\n> > + */\n> > +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)\n\nHrm ... 
this does currently tie MediaPipeline to only handling from the\nCameraSensor forwards.\n\nIn the future I could envisage it being other partial sections of a\npipeline - but I think that can be dealt with if/when the need arises.\n\n\n> > +{\n> > +     int ret;\n> > +\n> > +     for (const Entity &e : entities_) {\n> > +             /* The sensor is configured through the CameraSensor */\n> > +             if (!e.sourceLink)\n> > +                     break;\n> > +\n> > +             MediaLink *link = e.sourceLink;\n> > +             MediaPad *source = link->source();\n> > +             MediaPad *sink = link->sink();\n> > +\n> > +             /* 'format' already contains the sensor configuration */\n> > +             if (source->entity() != sensor->entity()) {\n> > +                     /* Todo: Add MediaDevice cache to reduce FD pressure */\n> \n> s/Todo/\todo/\n> \n> > +                     V4L2Subdevice subdev(source->entity());\n> > +                     ret = subdev.open();\n> \n> Trying to open a subdev multiple times return -EBUSY, if this function\n> is meant to be used by pipeline handlers they should be careful in\n> avoiding contentions.\n\nYes, at this scope I think it works - but indeed that's where the todo\nabove comes in. 
Simple pipeline handler has a device 'cache' of sorts\nwhich would be helpful to bring in next.\n\nI'd like to do that on top ...\n\n\n> > +                     if (ret)\n> > +                             return ret;\n> > +\n> > +                     ret = subdev.getFormat(source->index(), format);\n> > +                     if (ret < 0)\n> > +                             return ret;\n> > +             }\n> > +\n> > +             V4L2SubdeviceFormat sourceFormat = *format;\n> > +             /* Todo: Add MediaDevice cache to reduce FD pressure */\n> \n> s/Todo/\\todo/\n\nAck.\n\nThanks.\n\n\n> \n> > +             V4L2Subdevice subdev(sink->entity());\n> > +             ret = subdev.open();\n> > +             if (ret)\n> > +                     return ret;\n> > +\n> > +             ret = subdev.setFormat(sink->index(), format);\n> > +             if (ret < 0)\n> > +                     return ret;\n> > +\n> > +             if (format->code != sourceFormat.code ||\n> > +                 format->size != sourceFormat.size) {\n> > +                     LOG(MediaPipeline, Debug)\n> > +                             << \"Source '\" << *source\n> > +                             << \" produces \" << sourceFormat\n> > +                             << \", sink '\" << *sink\n> > +                             << \" requires \" << format;\n> > +                     return -EINVAL;\n> > +             }\n> > +\n> > +             LOG(MediaPipeline, Debug)\n> > +                     << \"Link \" << *link << \" configured with format \"\n> > +                     << format;\n> > +     }\n> > +\n> > +     return 0;\n> > +}\n> > +\n> > +} /* namespace libcamera */\n> > diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build\n> > index aa9ab0291854..2c0f8603b231 100644\n> > --- a/src/libcamera/meson.build\n> > +++ b/src/libcamera/meson.build\n> > @@ -41,6 +41,7 @@ libcamera_internal_sources = files([\n> >      'mapped_framebuffer.cpp',\n> >      'media_device.cpp',\n> >      
'media_object.cpp',\n> > +    'media_pipeline.cpp',\n> >      'pipeline_handler.cpp',\n> >      'process.cpp',\n> >      'pub_key.cpp',\n> > --\n> > 2.46.0\n> >","headers":{"Return-Path":"<libcamera-devel-bounces@lists.libcamera.org>","X-Original-To":"parsemail@patchwork.libcamera.org","Delivered-To":"parsemail@patchwork.libcamera.org","Received":["from lancelot.ideasonboard.com (lancelot.ideasonboard.com\n\t[92.243.16.209])\n\tby patchwork.libcamera.org (Postfix) with ESMTPS id 13DD6C3257\n\tfor <parsemail@patchwork.libcamera.org>;\n\tThu,  3 Oct 2024 15:21:14 +0000 (UTC)","from lancelot.ideasonboard.com (localhost [IPv6:::1])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTP id EB4956351F;\n\tThu,  3 Oct 2024 17:21:12 +0200 (CEST)","from perceval.ideasonboard.com (perceval.ideasonboard.com\n\t[213.167.242.64])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTPS id A6E3C62C92\n\tfor <libcamera-devel@lists.libcamera.org>;\n\tThu,  3 Oct 2024 17:21:10 +0200 (CEST)","from pendragon.ideasonboard.com\n\t(cpc89244-aztw30-2-0-cust6594.18-1.cable.virginm.net [86.31.185.195])\n\tby perceval.ideasonboard.com (Postfix) with ESMTPSA id 3DC15593;\n\tThu,  3 Oct 2024 17:19:37 +0200 (CEST)"],"Authentication-Results":"lancelot.ideasonboard.com; dkim=pass (1024-bit key;\n\tunprotected) header.d=ideasonboard.com header.i=@ideasonboard.com\n\theader.b=\"MDzzB9Lz\"; dkim-atps=neutral","DKIM-Signature":"v=1; a=rsa-sha256; c=relaxed/simple; d=ideasonboard.com;\n\ts=mail; t=1727968777;\n\tbh=1LAk7EYYkq2oEu3iLm8D3g5CrKGwyPm+s7UyO8jAt/Y=;\n\th=In-Reply-To:References:Subject:From:Cc:To:Date:From;\n\tb=MDzzB9LzEohCaKo5wANajzmZAPlfcDKmJR4t2028/TISdroaZX3ohY2cbz9XiWSNo\n\tLCR2AC3MKfIKm0DqUjE2wn/a0o7TnM1UvQA6jwlJu2A6cFA8HQF4oKUd9mJ13k1aBG\n\tgZMIs+10zoTGEskLtBC+DncUjnhrfmIj88GkfLmE=","Content-Type":"text/plain; 
charset=\"utf-8\"","MIME-Version":"1.0","Content-Transfer-Encoding":"quoted-printable","In-Reply-To":"<kkvf7kx3jufxeqlp4bqcheijx2tqpjhik3b2u4ggkc27ha4slc@c3zaiharvg4f>","References":"<20240916140241.47845-1-kieran.bingham@ideasonboard.com>\n\t<20240916140241.47845-4-kieran.bingham@ideasonboard.com>\n\t<kkvf7kx3jufxeqlp4bqcheijx2tqpjhik3b2u4ggkc27ha4slc@c3zaiharvg4f>","Subject":"Re: [PATCH 3/4] libcamera: internal: Add MediaPipeline helper","From":"Kieran Bingham <kieran.bingham@ideasonboard.com>","Cc":"libcamera devel <libcamera-devel@lists.libcamera.org>","To":"Jacopo Mondi <jacopo.mondi@ideasonboard.com>","Date":"Thu, 03 Oct 2024 16:21:07 +0100","Message-ID":"<172796886726.532453.10591206661391731330@ping.linuxembedded.co.uk>","User-Agent":"alot/0.10","X-BeenThere":"libcamera-devel@lists.libcamera.org","X-Mailman-Version":"2.1.29","Precedence":"list","List-Id":"<libcamera-devel.lists.libcamera.org>","List-Unsubscribe":"<https://lists.libcamera.org/options/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=unsubscribe>","List-Archive":"<https://lists.libcamera.org/pipermail/libcamera-devel/>","List-Post":"<mailto:libcamera-devel@lists.libcamera.org>","List-Help":"<mailto:libcamera-devel-request@lists.libcamera.org?subject=help>","List-Subscribe":"<https://lists.libcamera.org/listinfo/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=subscribe>","Errors-To":"libcamera-devel-bounces@lists.libcamera.org","Sender":"\"libcamera-devel\" <libcamera-devel-bounces@lists.libcamera.org>"}}]