[v3,3/4] libcamera: internal: Add MediaPipeline helper

Message ID 20241008231314.744556-4-kieran.bingham@ideasonboard.com
State New
Series
  • MediaPipeline: Complex input device support

Commit Message

Kieran Bingham Oct. 8, 2024, 11:13 p.m. UTC
Provide a MediaPipeline class to help identifying and managing pipelines
across a MediaDevice graph.

Reviewed-by: Umang Jain <umang.jain@ideasonboard.com>
Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>

---
v2:

- use srcPads to clearly identify which pads are managed
- Only report enabling links when a change is made
- fix header includes
- Fix includes
- Remove period at end of briefs
- Document function parameters
- expand documentation throughout
- Fix debug log capitalisation
- reduce scope of single use 'ret'

v3:
- Add doxygen documentation for the format parameter of configure()
- Add 'format' identifier to configure parameter

 include/libcamera/internal/media_pipeline.h |  59 ++++
 include/libcamera/internal/meson.build      |   1 +
 src/libcamera/media_pipeline.cpp            | 311 ++++++++++++++++++++
 src/libcamera/meson.build                   |   1 +
 4 files changed, 372 insertions(+)
 create mode 100644 include/libcamera/internal/media_pipeline.h
 create mode 100644 src/libcamera/media_pipeline.cpp
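
For context, a pipeline handler would drive the new class roughly as follows (a hedged sketch against the API introduced in this patch; the sensor entity, the "csi capture" sink name, the sensor pointer and the initial format are all hypothetical placeholders):

	MediaPipeline pipe;

	/* Discover the path from the sensor entity to the named sink entity. */
	int ret = pipe.init(sensorEntity, "csi capture");
	if (ret)
		return ret;

	/* Enable every link along the discovered path. */
	ret = pipe.initLinks();
	if (ret)
		return ret;

	/* Propagate the sensor format through each entity of the pipeline. */
	V4L2SubdeviceFormat format = sensorFormat;
	ret = pipe.configure(sensor, &format);
	if (ret)
		return ret;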

Comments

Dan Scally Oct. 23, 2024, 10:31 p.m. UTC | #1
Hi Kieran, thanks for the patch

On 09/10/2024 00:13, Kieran Bingham wrote:
> Provide a MediaPipeline class to help identifying and managing pipelines
> across a MediaDevice graph.
>
> Reviewed-by: Umang Jain <umang.jain@ideasonboard.com>
> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
>
> ---
> v2:
>
> - use srcPads to clearly identify which pads are managed
> - Only report enabling links when a change is made
> - fix header includes
> - Fix includes
> - Remove period at end of briefs
> - Document function parameters
> - expand documentation throughout
> - Fix debug log capitalisation
> - reduce scope of single use 'ret'
>
> v3:
> - Add doxygen documentation for the format parameter of configure()
> - Add 'format' identifier to configure parameter
>
>   include/libcamera/internal/media_pipeline.h |  59 ++++
>   include/libcamera/internal/meson.build      |   1 +
>   src/libcamera/media_pipeline.cpp            | 311 ++++++++++++++++++++
>   src/libcamera/meson.build                   |   1 +
>   4 files changed, 372 insertions(+)
>   create mode 100644 include/libcamera/internal/media_pipeline.h
>   create mode 100644 src/libcamera/media_pipeline.cpp
>
> diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h
> new file mode 100644
> index 000000000000..ee773b892719
> --- /dev/null
> +++ b/include/libcamera/internal/media_pipeline.h
> @@ -0,0 +1,59 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2024, Ideas on Board Oy
> + *
> + * Media pipeline handler
I think I would mildly prefer not to refer to something not derived from class PipelineHandler with 
the phrase "pipeline handler" but it's not a big deal
> + */
> +
> +#pragma once
> +
> +#include <list>
> +#include <string>
> +#include <vector>
> +
> +namespace libcamera {
> +
> +class CameraSensor;
> +class MediaEntity;
> +class MediaLink;
> +class MediaPad;
> +struct V4L2SubdeviceFormat;
> +
> +class MediaPipeline
> +{
> +public:
> +	int init(MediaEntity *source, std::string sink);
> +	int initLinks();
> +	int configure(CameraSensor *sensor, V4L2SubdeviceFormat *format);
> +
> +private:
> +	struct Entity {
> +		/* The media entity, always valid. */
> +		MediaEntity *entity;
> +		/*
> +		 * Whether or not the entity is a subdev that supports the
> +		 * routing API.
> +		 */
> +		bool supportsRouting;
> +		/*
> +		 * The local sink pad connected to the upstream entity, null for
> +		 * the camera sensor at the beginning of the pipeline.
> +		 */
> +		const MediaPad *sink;
> +		/*
> +		 * The local source pad connected to the downstream entity, null
> +		 * for the video node at the end of the pipeline.
> +		 */
> +		const MediaPad *source;
> +		/*
> +		 * The link on the source pad, to the downstream entity, null
> +		 * for the video node at the end of the pipeline.
> +		 */
> +		MediaLink *sourceLink;
> +	};
> +
> +	std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
> +	std::list<Entity> entities_;
> +};
> +
> +} /* namespace libcamera */
> diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build
> index 1c5eef9cab80..60a35d3e0357 100644
> --- a/include/libcamera/internal/meson.build
> +++ b/include/libcamera/internal/meson.build
> @@ -30,6 +30,7 @@ libcamera_internal_headers = files([
>       'mapped_framebuffer.h',
>       'media_device.h',
>       'media_object.h',
> +    'media_pipeline.h',
>       'pipeline_handler.h',
>       'process.h',
>       'pub_key.h',
> diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp
> new file mode 100644
> index 000000000000..ec78b78e2f75
> --- /dev/null
> +++ b/src/libcamera/media_pipeline.cpp
> @@ -0,0 +1,311 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2024, Ideas on Board Oy
> + *
> + * Media pipeline support
> + */
> +
> +#include "libcamera/internal/media_pipeline.h"
> +
> +#include <algorithm>
> +#include <errno.h>
> +#include <memory>
> +#include <queue>
> +#include <tuple>
> +#include <unordered_map>
> +#include <unordered_set>
> +
> +#include <linux/media.h>
> +
> +#include <libcamera/base/log.h>
> +
> +#include "libcamera/internal/camera_sensor.h"
> +#include "libcamera/internal/media_device.h"
> +#include "libcamera/internal/media_object.h"
> +#include "libcamera/internal/v4l2_subdevice.h"
> +
> +/**
> + * \file media_pipeline.h
> + * \brief Provide a representation of a pipeline of devices using the Media
> + * Controller
> + */
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(MediaPipeline)
> +
> +/**
> + * \class MediaPipeline
> + * \brief The MediaPipeline represents a set of entities that together form a
> + * data path for stream data
> + *
> + * A MediaPipeline instance is constructed from a sink and a source between
> + * two entities in a media graph.
> + */
> +
> +/**
> + * \brief Retrieve all source pads connected to a sink pad through active routes
> + * \param[in] sink The sink pad to examine
> + *
> + * Examine the entity using the V4L2 Subdevice Routing API to collect all the
> + * source pads which are connected with an active route to the sink pad.
> + *
> + * \return A vector of source MediaPads
> + */
> +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)
> +{
> +	MediaEntity *entity = sink->entity();
> +	std::unique_ptr<V4L2Subdevice> subdev =
> +		std::make_unique<V4L2Subdevice>(entity);
> +
> +	int ret = subdev->open();
> +	if (ret < 0)
> +		return {};
> +
> +	V4L2Subdevice::Routing routing = {};
> +	ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);
> +	if (ret < 0)
> +		return {};
> +
> +	std::vector<const MediaPad *> pads;
> +
> +	for (const V4L2Subdevice::Route &route : routing) {
> +		if (sink->index() != route.sink.pad ||
> +		    !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))
> +			continue;
> +
> +		const MediaPad *pad = entity->getPadByIndex(route.source.pad);
> +		if (!pad) {
> +			LOG(MediaPipeline, Warning)
> +				<< "Entity " << entity->name()
> +				<< " has invalid route source pad "
> +				<< route.source.pad;
> +		}
> +
> +		pads.push_back(pad);

Couldn't this result in a nullptr being pushed into the vector, and then dereferenced in ::init()? I 
doubt it's actually possible as presumably the kernel wouldn't return such invalid data, but still.
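
A minimal sketch of the guard being hinted at here, assuming an invalid route should simply be skipped rather than treated as fatal:

	const MediaPad *pad = entity->getPadByIndex(route.source.pad);
	if (!pad) {
		LOG(MediaPipeline, Warning)
			<< "Entity " << entity->name()
			<< " has invalid route source pad "
			<< route.source.pad;
		/* Skip the invalid route instead of storing a nullptr. */
		continue;
	}

	pads.push_back(pad);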

> +	}
> +
> +	return pads;
> +}
> +
> +/**
> + * \brief Find the path from source to sink
> + * \param[in] source The source entity to start from
> + * \param[in] sink The sink entity name to search for
> + *
> + * Starting from a source entity, determine the shortest path to the target
> + * described by sink.
> + *
> + * If sink can not be found, or a route from source to sink can not be achieved,
> + * an error of -ENOLINK will be returned.
> + *
> + * When successful, the MediaPipeline will internally store the representation
> + * of entities and links to describe the path between the two entities.
> + *
> + * It is expected that the Source entity is a sensor represented by the
> + * CameraSensor class.
Is it worth validating with the entity function? Up to you.
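A sketch of such a check, assuming MediaEntity::function() and the MEDIA_ENT_F_CAM_SENSOR function code are sufficient to discriminate the entities this class is used with:

	if (source->function() != MEDIA_ENT_F_CAM_SENSOR) {
		LOG(MediaPipeline, Error)
			<< "Source entity " << source->name()
			<< " is not a camera sensor";
		return -EINVAL;
	}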
> + *
> + * \return 0 on success, a negative errno otherwise
> + */
> +int MediaPipeline::init(MediaEntity *source, std::string sink)
> +{
> +	/*
> +	 * Find the shortest path between from the source and the
> +	 * target entity.
> +	 */
> +	std::unordered_set<MediaEntity *> visited;
> +	std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;
> +
> +	/* Remember at each entity where we came from. */
> +	std::unordered_map<MediaEntity *, Entity> parents;
> +	MediaEntity *entity = nullptr;
> +	MediaEntity *target = nullptr;
> +	MediaPad *sinkPad;
> +
> +	queue.push({ source, nullptr });
> +
> +	while (!queue.empty()) {
> +		std::tie(entity, sinkPad) = queue.front();
> +		queue.pop();
> +
> +		/* Found the target device. */
> +		if (entity->name() == sink) {
> +			LOG(MediaPipeline, Debug)
> +				<< "Found pipeline target " << entity->name();
> +			target = entity;
> +			break;
> +		}
> +
> +		visited.insert(entity);
> +
> +		/*
> +		 * Add direct downstream entities to the search queue. If the
> +		 * current entity supports the subdev internal routing API,
> +		 * restrict the search to downstream entities reachable through
> +		 * active routes.
> +		 */
> +
> +		std::vector<const MediaPad *> srcPads;
> +		bool supportsRouting = false;
> +
> +		if (sinkPad) {
> +			srcPads = routedSourcePads(sinkPad);
> +			if (!srcPads.empty())
> +				supportsRouting = true;
> +		}
> +
> +		if (srcPads.empty()) {
> +			for (const MediaPad *pad : entity->pads()) {
> +				if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
> +					continue;
> +				srcPads.push_back(pad);
> +			}
> +		}
> +
> +		for (const MediaPad *srcPad : srcPads) {
> +			for (MediaLink *link : srcPad->links()) {
> +				MediaEntity *next = link->sink()->entity();
> +				if (visited.find(next) == visited.end()) {
> +					queue.push({ next, link->sink() });
> +
> +					Entity e{ entity, supportsRouting, sinkPad,
> +						  srcPad, link };
> +					parents.insert({ next, e });
> +				}
> +			}
> +		}
> +	}
> +
> +	if (!target) {
> +		LOG(MediaPipeline, Error) << "Failed to connect " << source->name();
> +		return -ENOLINK;
> +	}
> +
> +	/*
> +	 * With the parents, we can follow back our way from the capture device
> +	 * to the sensor. Store all the entities in the pipeline, from the
> +	 * camera sensor to the video node, in entities_.
> +	 */
> +	entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });
> +
> +	for (auto it = parents.find(entity); it != parents.end();
> +	     it = parents.find(entity)) {
> +		const Entity &e = it->second;
> +		entities_.push_front(e);
> +		entity = e.entity;
> +	}
> +
> +	LOG(MediaPipeline, Info)
> +		<< "Found pipeline: "
> +		<< utils::join(entities_, " -> ",
> +			       [](const Entity &e) {
> +				       std::string s = "[";
> +				       if (e.sink)
> +					       s += std::to_string(e.sink->index()) + "|";
> +				       s += e.entity->name();
> +				       if (e.source)
> +					       s += "|" + std::to_string(e.source->index());
> +				       s += "]";
> +				       return s;
> +			       });

That's a bit untidy...maybe something liiiiike:

std::ostringstream s;

s << "[";
if (e.sink)
	s << e.sink->index() << "|";
s << e.entity->name();
if (e.source)
	s << "|" << e.source->index();
s << "]";

return s.str();

Or maybe we just need a proper std::string.format() utility until C++20
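
For comparison, the C++20 form might look something like this (just a sketch, assuming <format>/std::format were available to the codebase):

	/* Requires #include <format> (C++20). */
	return std::format("[{}{}{}]",
			   e.sink ? std::to_string(e.sink->index()) + "|" : std::string(),
			   e.entity->name(),
			   e.source ? "|" + std::to_string(e.source->index()) : std::string());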


> +
> +	return 0;
> +}
> +
> +/**
> + * \brief Initialise and enable all links through the MediaPipeline
> + * \return 0 on success, or a negative errno otherwise
> + */
> +int MediaPipeline::initLinks()
> +{
> +	MediaLink *sinkLink = nullptr;
> +	for (Entity &e : entities_) {
> +		/* Sensor entities have no connected sink. */
> +		if (!sinkLink) {
> +			sinkLink = e.sourceLink;
> +			continue;
> +		}
> +
> +		if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
> +			LOG(MediaPipeline, Debug) << "Enabling : " << *sinkLink;
> +
> +			int ret = sinkLink->setEnabled(true);
> +			if (ret < 0)
> +				return ret;
> +		}
> +
> +		sinkLink = e.sourceLink;
> +	}
> +
> +	return 0;
> +}
> +
> +/**
> + * \brief Configure the entities of this MediaPipeline
> + * \param[in] sensor The configured CameraSensor to propogate
> + * \param[inout] format The format to propogate through the pipeline
> + *
> + * Propagate formats through each of the entities of the Pipeline, validating
> + * that each one was not adjusted by the driver from the desired format.
> + *
> + * The format is updated with the final accepted format of the last entity of
> + * the pipeline.
> + *
> + * \return 0 on success or a negative errno otherwise
> + */
> +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)
> +{
> +	int ret;
> +
> +	for (const Entity &e : entities_) {
> +		/* The sensor is configured through the CameraSensor */
> +		if (!e.sourceLink)
> +			break;
> +
> +		MediaLink *link = e.sourceLink;
> +		MediaPad *source = link->source();
> +		MediaPad *sink = link->sink();
> +
> +		/* 'format' already contains the sensor configuration */
> +		if (source->entity() != sensor->entity()) {
> +			/* todo: Add MediaDevice cache to reduce FD pressure */
s/todo:/\todo/
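i.e. so Doxygen picks it up as a todo item:

	/* \todo Add MediaDevice cache to reduce FD pressure */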
> +			V4L2Subdevice subdev(source->entity());
> +			ret = subdev.open();
> +			if (ret)
> +				return ret;
> +
> +			ret = subdev.getFormat(source->index(), format);
> +			if (ret < 0)
> +				return ret;
> +		}
> +
> +		V4L2SubdeviceFormat sourceFormat = *format;
> +		/* todo: Add MediaDevice cache to reduce FD pressure */
> +		V4L2Subdevice subdev(sink->entity());
> +		ret = subdev.open();
> +		if (ret)
> +			return ret;
> +
> +		ret = subdev.setFormat(sink->index(), format);
> +		if (ret < 0)
> +			return ret;
> +
> +		if (format->code != sourceFormat.code ||
> +		    format->size != sourceFormat.size) {
> +			LOG(MediaPipeline, Debug)
> +				<< "Source '" << *source
> +				<< " produces " << sourceFormat
> +				<< ", sink '" << *sink
> +				<< " requires " << format;
> +			return -EINVAL;
> +		}
> +
> +		LOG(MediaPipeline, Debug)
> +			<< "Link " << *link << " configured with format "
> +			<< format;
> +	}
> +
> +	return 0;
> +}
> +
> +} /* namespace libcamera */
> diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
> index aa9ab0291854..2c0f8603b231 100644
> --- a/src/libcamera/meson.build
> +++ b/src/libcamera/meson.build
> @@ -41,6 +41,7 @@ libcamera_internal_sources = files([
>       'mapped_framebuffer.cpp',
>       'media_device.cpp',
>       'media_object.cpp',
> +    'media_pipeline.cpp',
>       'pipeline_handler.cpp',
>       'process.cpp',
>       'pub_key.cpp',
Dan Scally Oct. 24, 2024, 9:20 a.m. UTC | #2
So coming back to this after reading patch 4

On 23/10/2024 23:31, Dan Scally wrote:
> Hi Kieran, thanks for the patch
>
> On 09/10/2024 00:13, Kieran Bingham wrote:
>> Provide a MediaPipeline class to help identifying and managing pipelines
>> across a MediaDevice graph.
>>
>> Reviewed-by: Umang Jain <umang.jain@ideasonboard.com>
>> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
>>
>> ---
>> v2:
>>
>> - use srcPads to clearly identify which pads are managed
>> - Only report enabling links when a change is made
>> - fix header includes
>> - Fix includes
>> - Remove period at end of briefs
>> - Document function parameters
>> - expand documentation throughout
>> - Fix debug log capitalisation
>> - reduce scope of single use 'ret'
>>
>> v3:
>> - Add doxygen documentation for the format parameter of configure()
>> - Add 'format' identifier to configure parameter
>>
>>   include/libcamera/internal/media_pipeline.h |  59 ++++
>>   include/libcamera/internal/meson.build      |   1 +
>>   src/libcamera/media_pipeline.cpp            | 311 ++++++++++++++++++++
>>   src/libcamera/meson.build                   |   1 +
>>   4 files changed, 372 insertions(+)
>>   create mode 100644 include/libcamera/internal/media_pipeline.h
>>   create mode 100644 src/libcamera/media_pipeline.cpp
>>
>> diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h
>> new file mode 100644
>> index 000000000000..ee773b892719
>> --- /dev/null
>> +++ b/include/libcamera/internal/media_pipeline.h
>> @@ -0,0 +1,59 @@
>> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
>> +/*
>> + * Copyright (C) 2024, Ideas on Board Oy
>> + *
>> + * Media pipeline handler
> I think I would mildly prefer not to refer to something not derived from class PipelineHandler 
> with the phrase "pipeline handler" but it's not a big deal
>> + */
>> +
>> +#pragma once
>> +
>> +#include <list>
>> +#include <string>
>> +#include <vector>
>> +
>> +namespace libcamera {
>> +
>> +class CameraSensor;
>> +class MediaEntity;
>> +class MediaLink;
>> +class MediaPad;
>> +struct V4L2SubdeviceFormat;
>> +
>> +class MediaPipeline
>> +{
>> +public:
>> +    int init(MediaEntity *source, std::string sink);
>> +    int initLinks();
>> +    int configure(CameraSensor *sensor, V4L2SubdeviceFormat *format);
>> +
>> +private:
>> +    struct Entity {
>> +        /* The media entity, always valid. */
>> +        MediaEntity *entity;
>> +        /*
>> +         * Whether or not the entity is a subdev that supports the
>> +         * routing API.
>> +         */
>> +        bool supportsRouting;
>> +        /*
>> +         * The local sink pad connected to the upstream entity, null for
>> +         * the camera sensor at the beginning of the pipeline.
>> +         */
>> +        const MediaPad *sink;
>> +        /*
>> +         * The local source pad connected to the downstream entity, null
>> +         * for the video node at the end of the pipeline.
>> +         */
>> +        const MediaPad *source;
>> +        /*
>> +         * The link on the source pad, to the downstream entity, null
>> +         * for the video node at the end of the pipeline.
>> +         */
>> +        MediaLink *sourceLink;
>> +    };
I think I'd make the comments a bit clearer that it might be a subdevice entity at the end (though 
source and sourceLink will still be null)
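
For instance, something along these lines (a wording sketch only):

	/*
	 * The local source pad connected to the downstream entity, null
	 * for the terminal entity of the pipeline (typically a video
	 * node, but possibly a subdevice).
	 */
	const MediaPad *source;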
>> +
>> +    std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
>> +    std::list<Entity> entities_;
>> +};
>> +
>> +} /* namespace libcamera */
>> diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build
>> index 1c5eef9cab80..60a35d3e0357 100644
>> --- a/include/libcamera/internal/meson.build
>> +++ b/include/libcamera/internal/meson.build
>> @@ -30,6 +30,7 @@ libcamera_internal_headers = files([
>>       'mapped_framebuffer.h',
>>       'media_device.h',
>>       'media_object.h',
>> +    'media_pipeline.h',
>>       'pipeline_handler.h',
>>       'process.h',
>>       'pub_key.h',
>> diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp
>> new file mode 100644
>> index 000000000000..ec78b78e2f75
>> --- /dev/null
>> +++ b/src/libcamera/media_pipeline.cpp
>> @@ -0,0 +1,311 @@
>> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
>> +/*
>> + * Copyright (C) 2024, Ideas on Board Oy
>> + *
>> + * Media pipeline support
>> + */
>> +
>> +#include "libcamera/internal/media_pipeline.h"
>> +
>> +#include <algorithm>
>> +#include <errno.h>
>> +#include <memory>
>> +#include <queue>
>> +#include <tuple>
>> +#include <unordered_map>
>> +#include <unordered_set>
>> +
>> +#include <linux/media.h>
>> +
>> +#include <libcamera/base/log.h>
>> +
>> +#include "libcamera/internal/camera_sensor.h"
>> +#include "libcamera/internal/media_device.h"
>> +#include "libcamera/internal/media_object.h"
>> +#include "libcamera/internal/v4l2_subdevice.h"
>> +
>> +/**
>> + * \file media_pipeline.h
>> + * \brief Provide a representation of a pipeline of devices using the Media
>> + * Controller
>> + */
>> +
>> +namespace libcamera {
>> +
>> +LOG_DEFINE_CATEGORY(MediaPipeline)
>> +
>> +/**
>> + * \class MediaPipeline
>> + * \brief The MediaPipeline represents a set of entities that together form a
>> + * data path for stream data
>> + *
>> + * A MediaPipeline instance is constructed from a sink and a source between
>> + * two entities in a media graph.
>> + */
>> +
>> +/**
>> + * \brief Retrieve all source pads connected to a sink pad through active routes
>> + * \param[in] sink The sink pad to examine
>> + *
>> + * Examine the entity using the V4L2 Subdevice Routing API to collect all the
>> + * source pads which are connected with an active route to the sink pad.
>> + *
>> + * \return A vector of source MediaPads
>> + */
>> +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)
>> +{
>> +    MediaEntity *entity = sink->entity();
>> +    std::unique_ptr<V4L2Subdevice> subdev =
>> +        std::make_unique<V4L2Subdevice>(entity);
>> +
>> +    int ret = subdev->open();
>> +    if (ret < 0)
>> +        return {};
>> +
>> +    V4L2Subdevice::Routing routing = {};
>> +    ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);
>> +    if (ret < 0)
>> +        return {};
>> +
>> +    std::vector<const MediaPad *> pads;
>> +
>> +    for (const V4L2Subdevice::Route &route : routing) {
>> +        if (sink->index() != route.sink.pad ||
>> +            !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))
>> +            continue;
>> +
>> +        const MediaPad *pad = entity->getPadByIndex(route.source.pad);
>> +        if (!pad) {
>> +            LOG(MediaPipeline, Warning)
>> +                << "Entity " << entity->name()
>> +                << " has invalid route source pad "
>> +                << route.source.pad;
>> +        }
>> +
>> +        pads.push_back(pad);
>
> Couldn't this result in a nullptr being pushed into the vector, and then dereferenced in ::init()? 
> I doubt it's actually possible as presumably the kernel wouldn't return such invalid data, but still.
>
>> +    }
>> +
>> +    return pads;
>> +}
>> +
>> +/**
>> + * \brief Find the path from source to sink
>> + * \param[in] source The source entity to start from
>> + * \param[in] sink The sink entity name to search for
>> + *
>> + * Starting from a source entity, determine the shortest path to the target
>> + * described by sink.
>> + *
>> + * If sink can not be found, or a route from source to sink can not be achieved,
>> + * an error of -ENOLINK will be returned.
>> + *
>> + * When successful, the MediaPipeline will internally store the representation
>> + * of entities and links to describe the path between the two entities.
>> + *
>> + * It is expected that the Source entity is a sensor represented by the
>> + * CameraSensor class.
> Is it worth validating with the entity function? Up to you.
>> + *
>> + * \return 0 on success, a negative errno otherwise
>> + */
>> +int MediaPipeline::init(MediaEntity *source, std::string sink)
>> +{
>> +    /*
>> +     * Find the shortest path between from the source and the
>> +     * target entity.
>> +     */
>> +    std::unordered_set<MediaEntity *> visited;
>> +    std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;
>> +
>> +    /* Remember at each entity where we came from. */
>> +    std::unordered_map<MediaEntity *, Entity> parents;
>> +    MediaEntity *entity = nullptr;
>> +    MediaEntity *target = nullptr;
>> +    MediaPad *sinkPad;
>> +
>> +    queue.push({ source, nullptr });
>> +
>> +    while (!queue.empty()) {
>> +        std::tie(entity, sinkPad) = queue.front();
>> +        queue.pop();
>> +
>> +        /* Found the target device. */
>> +        if (entity->name() == sink) {
>> +            LOG(MediaPipeline, Debug)
>> +                << "Found pipeline target " << entity->name();
>> +            target = entity;
>> +            break;
>> +        }
>> +
>> +        visited.insert(entity);
>> +
>> +        /*
>> +         * Add direct downstream entities to the search queue. If the
>> +         * current entity supports the subdev internal routing API,
>> +         * restrict the search to downstream entities reachable through
>> +         * active routes.
>> +         */
>> +
>> +        std::vector<const MediaPad *> srcPads;
>> +        bool supportsRouting = false;
>> +
>> +        if (sinkPad) {
>> +            srcPads = routedSourcePads(sinkPad);
>> +            if (!srcPads.empty())
>> +                supportsRouting = true;
>> +        }
>> +
>> +        if (srcPads.empty()) {
>> +            for (const MediaPad *pad : entity->pads()) {
>> +                if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
>> +                    continue;
>> +                srcPads.push_back(pad);
>> +            }
>> +        }
>> +
>> +        for (const MediaPad *srcPad : srcPads) {
>> +            for (MediaLink *link : srcPad->links()) {
>> +                MediaEntity *next = link->sink()->entity();
>> +                if (visited.find(next) == visited.end()) {
>> +                    queue.push({ next, link->sink() });
>> +
>> +                    Entity e{ entity, supportsRouting, sinkPad,
>> +                          srcPad, link };
>> +                    parents.insert({ next, e });
>> +                }
>> +            }
>> +        }
>> +    }
>> +
>> +    if (!target) {
>> +        LOG(MediaPipeline, Error) << "Failed to connect " << source->name();
>> +        return -ENOLINK;
>> +    }
>> +
>> +    /*
>> +     * With the parents, we can follow back our way from the capture device
>> +     * to the sensor. Store all the entities in the pipeline, from the
>> +     * camera sensor to the video node, in entities_.
>> +     */


And same here perhaps - there might not be a video node at the end of the pipeline.

>> +    entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });
>> +
>> +    for (auto it = parents.find(entity); it != parents.end();
>> +         it = parents.find(entity)) {
>> +        const Entity &e = it->second;
>> +        entities_.push_front(e);
>> +        entity = e.entity;
>> +    }
>> +
>> +    LOG(MediaPipeline, Info)
>> +        << "Found pipeline: "
>> +        << utils::join(entities_, " -> ",
>> +                   [](const Entity &e) {
>> +                       std::string s = "[";
>> +                       if (e.sink)
>> +                           s += std::to_string(e.sink->index()) + "|";
>> +                       s += e.entity->name();
>> +                       if (e.source)
>> +                           s += "|" + std::to_string(e.source->index());
>> +                       s += "]";
>> +                       return s;
>> +                   });
>
> That's a bit untidy...maybe something liiiiike:
>
> std::ostringstream s;
>
> s << "[";
> if (e.sink)
> 	s << e.sink->index() << "|";
> s << e.entity->name();
> if (e.source)
> 	s << "|" << e.source->index();
> s << "]";
>
> return s.str();
>
> Or maybe we just need a proper std::string.format() utility until C++20
>
>
>> +
>> +    return 0;
>> +}
>> +
>> +/**
>> + * \brief Initialise and enable all links through the MediaPipeline
>> + * \return 0 on success, or a negative errno otherwise
>> + */
>> +int MediaPipeline::initLinks()
>> +{
>> +    MediaLink *sinkLink = nullptr;
>> +    for (Entity &e : entities_) {
>> +        /* Sensor entities have no connected sink. */
>> +        if (!sinkLink) {
>> +            sinkLink = e.sourceLink;
>> +            continue;
>> +        }
>> +
>> +        if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
>> +            LOG(MediaPipeline, Debug) << "Enabling : " << *sinkLink;
>> +
>> +            int ret = sinkLink->setEnabled(true);
>> +            if (ret < 0)
>> +                return ret;
>> +        }
>> +
>> +        sinkLink = e.sourceLink;
>> +    }
>> +
>> +    return 0;
>> +}
>> +
>> +/**
>> + * \brief Configure the entities of this MediaPipeline
>> + * \param[in] sensor The configured CameraSensor to propogate
>> + * \param[inout] format The format to propogate through the pipeline
>> + *
>> + * Propagate formats through each of the entities of the Pipeline, validating
>> + * that each one was not adjusted by the driver from the desired format.
>> + *
>> + * The format is updated with the final accepted format of the last entity of
>> + * the pipeline.
>> + *
>> + * \return 0 on success or a negative errno otherwise
>> + */
>> +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)
>> +{
>> +    int ret;
>> +
>> +    for (const Entity &e : entities_) {
>> +        /* The sensor is configured through the CameraSensor */
>> +        if (!e.sourceLink)
>> +            break;
>> +
>> +        MediaLink *link = e.sourceLink;
>> +        MediaPad *source = link->source();
>> +        MediaPad *sink = link->sink();
>> +
>> +        /* 'format' already contains the sensor configuration */
>> +        if (source->entity() != sensor->entity()) {
>> +            /* todo: Add MediaDevice cache to reduce FD pressure */
> s/todo:/\todo/
>> +            V4L2Subdevice subdev(source->entity());
>> +            ret = subdev.open();
>> +            if (ret)
>> +                return ret;
>> +
>> +            ret = subdev.getFormat(source->index(), format);
>> +            if (ret < 0)
>> +                return ret;
>> +        }
>> +
>> +        V4L2SubdeviceFormat sourceFormat = *format;
>> +        /* todo: Add MediaDevice cache to reduce FD pressure */
>> +        V4L2Subdevice subdev(sink->entity());
>> +        ret = subdev.open();
>> +        if (ret)
>> +            return ret;
>> +
>> +        ret = subdev.setFormat(sink->index(), format);
>> +        if (ret < 0)
>> +            return ret;
>> +
>> +        if (format->code != sourceFormat.code ||
>> +            format->size != sourceFormat.size) {
>> +            LOG(MediaPipeline, Debug)
>> +                << "Source '" << *source
>> +                << " produces " << sourceFormat
>> +                << ", sink '" << *sink
>> +                << " requires " << format;
>> +            return -EINVAL;
>> +        }
>> +
>> +        LOG(MediaPipeline, Debug)
>> +            << "Link " << *link << " configured with format "
>> +            << format;
>> +    }
>> +
>> +    return 0;
>> +}
>> +
>> +} /* namespace libcamera */
>> diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
>> index aa9ab0291854..2c0f8603b231 100644
>> --- a/src/libcamera/meson.build
>> +++ b/src/libcamera/meson.build
>> @@ -41,6 +41,7 @@ libcamera_internal_sources = files([
>>       'mapped_framebuffer.cpp',
>>       'media_device.cpp',
>>       'media_object.cpp',
>> +    'media_pipeline.cpp',
>>       'pipeline_handler.cpp',
>>       'process.cpp',
>>       'pub_key.cpp',
Jacopo Mondi Oct. 24, 2024, 4:22 p.m. UTC | #3
Hi Kieran

On Wed, Oct 09, 2024 at 12:13:13AM +0100, Kieran Bingham wrote:
> Provide a MediaPipeline class to help identifying and managing pipelines
> across a MediaDevice graph.
>
> Reviewed-by: Umang Jain <umang.jain@ideasonboard.com>
> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
>
> ---
> v2:
>
> - use srcPads to clearly identify which pads are managed
> - Only report enabling links when a change is made
> - fix header includes
> - Fix includes
> - Remove period at end of briefs
> - Document function parameters
> - expand documentation throughout
> - Fix debug log capitalisation
> - reduce scope of single use 'ret'
>
> v3:
> - Add doxygen documentation for the format parameter of configure()
> - Add 'format' identifier to configure parameter
>
>  include/libcamera/internal/media_pipeline.h |  59 ++++
>  include/libcamera/internal/meson.build      |   1 +
>  src/libcamera/media_pipeline.cpp            | 311 ++++++++++++++++++++
>  src/libcamera/meson.build                   |   1 +
>  4 files changed, 372 insertions(+)
>  create mode 100644 include/libcamera/internal/media_pipeline.h
>  create mode 100644 src/libcamera/media_pipeline.cpp
>
> diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h
> new file mode 100644
> index 000000000000..ee773b892719
> --- /dev/null
> +++ b/include/libcamera/internal/media_pipeline.h
> @@ -0,0 +1,59 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2024, Ideas on Board Oy
> + *
> + * Media pipeline handler
> + */
> +
> +#pragma once
> +
> +#include <list>
> +#include <string>
> +#include <vector>
> +
> +namespace libcamera {
> +
> +class CameraSensor;
> +class MediaEntity;
> +class MediaLink;
> +class MediaPad;
> +struct V4L2SubdeviceFormat;
> +
> +class MediaPipeline
> +{
> +public:
> +	int init(MediaEntity *source, std::string sink);
> +	int initLinks();
> +	int configure(CameraSensor *sensor, V4L2SubdeviceFormat *format);
> +
> +private:
> +	struct Entity {
> +		/* The media entity, always valid. */
> +		MediaEntity *entity;
> +		/*
> +		 * Whether or not the entity is a subdev that supports the
> +		 * routing API.
> +		 */
> +		bool supportsRouting;
> +		/*
> +		 * The local sink pad connected to the upstream entity, null for
> +		 * the camera sensor at the beginning of the pipeline.
> +		 */
> +		const MediaPad *sink;
> +		/*
> +		 * The local source pad connected to the downstream entity, null
> +		 * for the video node at the end of the pipeline.
> +		 */
> +		const MediaPad *source;
> +		/*
> +		 * The link on the source pad, to the downstream entity, null
> +		 * for the video node at the end of the pipeline.
> +		 */
> +		MediaLink *sourceLink;
> +	};
> +
> +	std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
> +	std::list<Entity> entities_;
> +};
> +
> +} /* namespace libcamera */
> diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build
> index 1c5eef9cab80..60a35d3e0357 100644
> --- a/include/libcamera/internal/meson.build
> +++ b/include/libcamera/internal/meson.build
> @@ -30,6 +30,7 @@ libcamera_internal_headers = files([
>      'mapped_framebuffer.h',
>      'media_device.h',
>      'media_object.h',
> +    'media_pipeline.h',
>      'pipeline_handler.h',
>      'process.h',
>      'pub_key.h',
> diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp
> new file mode 100644
> index 000000000000..ec78b78e2f75
> --- /dev/null
> +++ b/src/libcamera/media_pipeline.cpp
> @@ -0,0 +1,311 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2024, Ideas on Board Oy
> + *
> + * Media pipeline support
> + */
> +
> +#include "libcamera/internal/media_pipeline.h"
> +
> +#include <algorithm>
> +#include <errno.h>
> +#include <memory>
> +#include <queue>
> +#include <tuple>
> +#include <unordered_map>
> +#include <unordered_set>
> +
> +#include <linux/media.h>
> +
> +#include <libcamera/base/log.h>
> +
> +#include "libcamera/internal/camera_sensor.h"
> +#include "libcamera/internal/media_device.h"
> +#include "libcamera/internal/media_object.h"
> +#include "libcamera/internal/v4l2_subdevice.h"
> +
> +/**
> + * \file media_pipeline.h
> + * \brief Provide a representation of a pipeline of devices using the Media
> + * Controller
> + */
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(MediaPipeline)
> +
> +/**
> + * \class MediaPipeline
> + * \brief The MediaPipeline represents a set of entities that together form a
> + * data path for stream data
> + *
> + * A MediaPipeline instance is constructed from a sink and a source between
> + * two entities in a media graph.
> + */
> +
> +/**
> + * \brief Retrieve all source pads connected to a sink pad through active routes
> + * \param[in] sink The sink pad to examine
> + *
> + * Examine the entity using the V4L2 Subdevice Routing API to collect all the
> + * source pads which are connected with an active route to the sink pad.
> + *
> + * \return A vector of source MediaPads
> + */
> +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)
> +{
> +	MediaEntity *entity = sink->entity();
> +	std::unique_ptr<V4L2Subdevice> subdev =
> +		std::make_unique<V4L2Subdevice>(entity);
> +
> +	int ret = subdev->open();

I still don't see how this can avoid creating contention with the pipeline
handler:

        int V4L2Device::open(unsigned int flags)
        {
                if (isOpen()) {
                        LOG(V4L2, Error) << "Device already open";
                        return -EBUSY;
                }

> +	if (ret < 0)
> +		return {};
> +
> +	V4L2Subdevice::Routing routing = {};
> +	ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);
> +	if (ret < 0)
> +		return {};
> +
> +	std::vector<const MediaPad *> pads;
> +
> +	for (const V4L2Subdevice::Route &route : routing) {
> +		if (sink->index() != route.sink.pad ||
> +		    !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))
> +			continue;
> +
> +		const MediaPad *pad = entity->getPadByIndex(route.source.pad);
> +		if (!pad) {
> +			LOG(MediaPipeline, Warning)
> +				<< "Entity " << entity->name()
> +				<< " has invalid route source pad "
> +				<< route.source.pad;
> +		}
> +
> +		pads.push_back(pad);
> +	}
> +
> +	return pads;
> +}
> +
> +/**
> + * \brief Find the path from source to sink
> + * \param[in] source The source entity to start from
> + * \param[in] sink The sink entity name to search for
> + *
> + * Starting from a source entity, determine the shortest path to the target
> + * described by sink.

by \a sink

> + *
> + * If sink can not be found, or a route from source to sink can not be achieved,

if \a sink

> + * an error of -ENOLINK will be returned.
> + *
> + * When successful, the MediaPipeline will internally store the representation
> + * of entities and links to describe the path between the two entities.
> + *
> + * It is expected that the Source entity is a sensor represented by the
> + * CameraSensor class.

This confuses me a bit, as the class seems to generically connect
entities and doesn't require starting from a sensor. Or does it?

> + *
> + * \return 0 on success, a negative errno otherwise
> + */
> +int MediaPipeline::init(MediaEntity *source, std::string sink)
> +{
> +	/*
> +	 * Find the shortest path between from the source and the

s/between from/between/

> +	 * target entity.
> +	 */
> +	std::unordered_set<MediaEntity *> visited;
> +	std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;
> +
> +	/* Remember at each entity where we came from. */
> +	std::unordered_map<MediaEntity *, Entity> parents;
> +	MediaEntity *entity = nullptr;
> +	MediaEntity *target = nullptr;
> +	MediaPad *sinkPad;
> +
> +	queue.push({ source, nullptr });
> +
> +	while (!queue.empty()) {
> +		std::tie(entity, sinkPad) = queue.front();
> +		queue.pop();
> +
> +		/* Found the target device. */
> +		if (entity->name() == sink) {
> +			LOG(MediaPipeline, Debug)
> +				<< "Found pipeline target " << entity->name();
> +			target = entity;
> +			break;
> +		}
> +
> +		visited.insert(entity);
> +
> +		/*
> +		 * Add direct downstream entities to the search queue. If the
> +		 * current entity supports the subdev internal routing API,
> +		 * restrict the search to downstream entities reachable through
> +		 * active routes.
> +		 */
> +
> +		std::vector<const MediaPad *> srcPads;
> +		bool supportsRouting = false;
> +
> +		if (sinkPad) {
> +			srcPads = routedSourcePads(sinkPad);
> +			if (!srcPads.empty())
> +				supportsRouting = true;
> +		}
> +
> +		if (srcPads.empty()) {
> +			for (const MediaPad *pad : entity->pads()) {
> +				if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
> +					continue;
> +				srcPads.push_back(pad);
> +			}
> +		}
> +
> +		for (const MediaPad *srcPad : srcPads) {
> +			for (MediaLink *link : srcPad->links()) {
> +				MediaEntity *next = link->sink()->entity();
> +				if (visited.find(next) == visited.end()) {
> +					queue.push({ next, link->sink() });
> +
> +					Entity e{ entity, supportsRouting, sinkPad,
> +						  srcPad, link };
> +					parents.insert({ next, e });
> +				}
> +			}
> +		}
> +	}
> +
> +	if (!target) {
> +		LOG(MediaPipeline, Error) << "Failed to connect " << source->name();
> +		return -ENOLINK;
> +	}
> +
> +	/*
> +	 * With the parents, we can follow back our way from the capture device
> +	 * to the sensor. Store all the entities in the pipeline, from the
> +	 * camera sensor to the video node, in entities_.
> +	 */
> +	entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });
> +
> +	for (auto it = parents.find(entity); it != parents.end();
> +	     it = parents.find(entity)) {
> +		const Entity &e = it->second;
> +		entities_.push_front(e);
> +		entity = e.entity;
> +	}
> +
> +	LOG(MediaPipeline, Info)
> +		<< "Found pipeline: "
> +		<< utils::join(entities_, " -> ",
> +			       [](const Entity &e) {
> +				       std::string s = "[";
> +				       if (e.sink)
> +					       s += std::to_string(e.sink->index()) + "|";
> +				       s += e.entity->name();
> +				       if (e.source)
> +					       s += "|" + std::to_string(e.source->index());
> +				       s += "]";
> +				       return s;
> +			       });
> +
> +	return 0;
> +}
> +
> +/**
> + * \brief Initialise and enable all links through the MediaPipeline
> + * \return 0 on success, or a negative errno otherwise
> + */
> +int MediaPipeline::initLinks()
> +{
> +	MediaLink *sinkLink = nullptr;
> +	for (Entity &e : entities_) {
> +		/* Sensor entities have no connected sink. */
> +		if (!sinkLink) {
> +			sinkLink = e.sourceLink;
> +			continue;
> +		}
> +
> +		if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
> +			LOG(MediaPipeline, Debug) << "Enabling : " << *sinkLink;
> +
> +			int ret = sinkLink->setEnabled(true);
> +			if (ret < 0)
> +				return ret;
> +		}
> +
> +		sinkLink = e.sourceLink;
> +	}
> +
> +	return 0;
> +}
> +
> +/**
> + * \brief Configure the entities of this MediaPipeline
> + * \param[in] sensor The configured CameraSensor to propogate
> + * \param[inout] format The format to propogate through the pipeline

s/propogate/propagate/

> + *
> + * Propagate formats through each of the entities of the Pipeline, validating
> + * that each one was not adjusted by the driver from the desired format.
> + *
> + * The format is updated with the final accepted format of the last entity of
> + * the pipeline.
> + *
> + * \return 0 on success or a negative errno otherwise
> + */
> +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)

Is this the right API? A caller should pass in the sensor (as a
pointer, is it modifiable?) and its format. What if this function only
takes a sensor and returns the format applied at the end of the pipeline?
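
Something along those lines, perhaps (a sketch of the alternative only; the return type and constness are open questions):

	/* Hypothetical alternative signature. */
	std::optional<V4L2SubdeviceFormat> configure(const CameraSensor *sensor);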

> +{
> +	int ret;
> +
> +	for (const Entity &e : entities_) {
> +		/* The sensor is configured through the CameraSensor */
> +		if (!e.sourceLink)

Sorry, but !sourceLink doesn't identify the video device (while the
comment seems to hint it identifies the sensor).
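
One way to make the intent explicit (a wording sketch only):

	/*
	 * The terminal entity of the pipeline has no downstream link left
	 * to configure, so format propagation stops here.
	 */
	if (!e.sourceLink)
		break;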

> +			break;
> +
> +		MediaLink *link = e.sourceLink;
> +		MediaPad *source = link->source();
> +		MediaPad *sink = link->sink();
> +
> +		/* 'format' already contains the sensor configuration */
> +		if (source->entity() != sensor->entity()) {
> +			/* todo: Add MediaDevice cache to reduce FD pressure */
> +			V4L2Subdevice subdev(source->entity());
> +			ret = subdev.open();
> +			if (ret)
> +				return ret;
> +
> +			ret = subdev.getFormat(source->index(), format);
> +			if (ret < 0)
> +				return ret;

I'm still a bit scared about contention on the subdevices. Probably,
if a pipeline handler uses MediaPipeline it is not supposed to handle
subdevs by itself?

All minors though
Reviewed-by: Jacopo Mondi <jacopo.mondi@ideasonboard.com>

Thanks
  j


> +		}
> +
> +		V4L2SubdeviceFormat sourceFormat = *format;
> +		/* todo: Add MediaDevice cache to reduce FD pressure */
> +		V4L2Subdevice subdev(sink->entity());
> +		ret = subdev.open();
> +		if (ret)
> +			return ret;
> +
> +		ret = subdev.setFormat(sink->index(), format);
> +		if (ret < 0)
> +			return ret;
> +
> +		if (format->code != sourceFormat.code ||
> +		    format->size != sourceFormat.size) {
> +			LOG(MediaPipeline, Debug)
> +				<< "Source '" << *source
> +				<< " produces " << sourceFormat
> +				<< ", sink '" << *sink
> +				<< " requires " << format;
> +			return -EINVAL;
> +		}
> +
> +		LOG(MediaPipeline, Debug)
> +			<< "Link " << *link << " configured with format "
> +			<< format;
> +	}
> +
> +	return 0;
> +}
> +
> +} /* namespace libcamera */
> diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
> index aa9ab0291854..2c0f8603b231 100644
> --- a/src/libcamera/meson.build
> +++ b/src/libcamera/meson.build
> @@ -41,6 +41,7 @@ libcamera_internal_sources = files([
>      'mapped_framebuffer.cpp',
>      'media_device.cpp',
>      'media_object.cpp',
> +    'media_pipeline.cpp',
>      'pipeline_handler.cpp',
>      'process.cpp',
>      'pub_key.cpp',
> --
> 2.34.1
>

Patch

diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h
new file mode 100644
index 000000000000..ee773b892719
--- /dev/null
+++ b/include/libcamera/internal/media_pipeline.h
@@ -0,0 +1,59 @@ 
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2024, Ideas on Board Oy
+ *
+ * Media pipeline handler
+ */
+
+#pragma once
+
+#include <list>
+#include <string>
+#include <vector>
+
+namespace libcamera {
+
+class CameraSensor;
+class MediaEntity;
+class MediaLink;
+class MediaPad;
+struct V4L2SubdeviceFormat;
+
+class MediaPipeline
+{
+public:
+	int init(MediaEntity *source, std::string sink);
+	int initLinks();
+	int configure(CameraSensor *sensor, V4L2SubdeviceFormat *format);
+
+private:
+	struct Entity {
+		/* The media entity, always valid. */
+		MediaEntity *entity;
+		/*
+		 * Whether or not the entity is a subdev that supports the
+		 * routing API.
+		 */
+		bool supportsRouting;
+		/*
+		 * The local sink pad connected to the upstream entity, null for
+		 * the camera sensor at the beginning of the pipeline.
+		 */
+		const MediaPad *sink;
+		/*
+		 * The local source pad connected to the downstream entity, null
+		 * for the video node at the end of the pipeline.
+		 */
+		const MediaPad *source;
+		/*
+		 * The link on the source pad, to the downstream entity, null
+		 * for the video node at the end of the pipeline.
+		 */
+		MediaLink *sourceLink;
+	};
+
+	std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
+	std::list<Entity> entities_;
+};
+
+} /* namespace libcamera */
diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build
index 1c5eef9cab80..60a35d3e0357 100644
--- a/include/libcamera/internal/meson.build
+++ b/include/libcamera/internal/meson.build
@@ -30,6 +30,7 @@  libcamera_internal_headers = files([
     'mapped_framebuffer.h',
     'media_device.h',
     'media_object.h',
+    'media_pipeline.h',
     'pipeline_handler.h',
     'process.h',
     'pub_key.h',
diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp
new file mode 100644
index 000000000000..ec78b78e2f75
--- /dev/null
+++ b/src/libcamera/media_pipeline.cpp
@@ -0,0 +1,311 @@ 
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2024, Ideas on Board Oy
+ *
+ * Media pipeline support
+ */
+
+#include "libcamera/internal/media_pipeline.h"
+
+#include <algorithm>
+#include <errno.h>
+#include <memory>
+#include <queue>
+#include <tuple>
+#include <unordered_map>
+#include <unordered_set>
+
+#include <linux/media.h>
+
+#include <libcamera/base/log.h>
+
+#include "libcamera/internal/camera_sensor.h"
+#include "libcamera/internal/media_device.h"
+#include "libcamera/internal/media_object.h"
+#include "libcamera/internal/v4l2_subdevice.h"
+
+/**
+ * \file media_pipeline.h
+ * \brief Provide a representation of a pipeline of devices using the Media
+ * Controller
+ */
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(MediaPipeline)
+
+/**
+ * \class MediaPipeline
+ * \brief The MediaPipeline represents a set of entities that together form a
+ * data path for stream data
+ *
+ * A MediaPipeline instance is constructed from a sink and a source between
+ * two entities in a media graph.
+ */
+
+/**
+ * \brief Retrieve all source pads connected to a sink pad through active routes
+ * \param[in] sink The sink pad to examine
+ *
+ * Examine the entity using the V4L2 Subdevice Routing API to collect all the
+ * source pads which are connected with an active route to the sink pad.
+ *
+ * \return A vector of source MediaPads
+ */
+std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)
+{
+	MediaEntity *entity = sink->entity();
+	std::unique_ptr<V4L2Subdevice> subdev =
+		std::make_unique<V4L2Subdevice>(entity);
+
+	int ret = subdev->open();
+	if (ret < 0)
+		return {};
+
+	V4L2Subdevice::Routing routing = {};
+	ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);
+	if (ret < 0)
+		return {};
+
+	std::vector<const MediaPad *> pads;
+
+	for (const V4L2Subdevice::Route &route : routing) {
+		if (sink->index() != route.sink.pad ||
+		    !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))
+			continue;
+
+		const MediaPad *pad = entity->getPadByIndex(route.source.pad);
+		if (!pad) {
+			LOG(MediaPipeline, Warning)
+				<< "Entity " << entity->name()
+				<< " has invalid route source pad "
+				<< route.source.pad;
+		}
+
+		pads.push_back(pad);
+	}
+
+	return pads;
+}
+
+/**
+ * \brief Find the path from source to sink
+ * \param[in] source The source entity to start from
+ * \param[in] sink The sink entity name to search for
+ *
+ * Starting from a source entity, determine the shortest path to the target
+ * described by sink.
+ *
+ * If sink can not be found, or a route from source to sink can not be achieved,
+ * an error of -ENOLINK will be returned.
+ *
+ * When successful, the MediaPipeline will internally store the representation
+ * of entities and links to describe the path between the two entities.
+ *
+ * It is expected that the Source entity is a sensor represented by the
+ * CameraSensor class.
+ *
+ * \return 0 on success, a negative errno otherwise
+ */
+int MediaPipeline::init(MediaEntity *source, std::string sink)
+{
+	/*
+	 * Find the shortest path between from the source and the
+	 * target entity.
+	 */
+	std::unordered_set<MediaEntity *> visited;
+	std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;
+
+	/* Remember at each entity where we came from. */
+	std::unordered_map<MediaEntity *, Entity> parents;
+	MediaEntity *entity = nullptr;
+	MediaEntity *target = nullptr;
+	MediaPad *sinkPad;
+
+	queue.push({ source, nullptr });
+
+	while (!queue.empty()) {
+		std::tie(entity, sinkPad) = queue.front();
+		queue.pop();
+
+		/* Found the target device. */
+		if (entity->name() == sink) {
+			LOG(MediaPipeline, Debug)
+				<< "Found pipeline target " << entity->name();
+			target = entity;
+			break;
+		}
+
+		visited.insert(entity);
+
+		/*
+		 * Add direct downstream entities to the search queue. If the
+		 * current entity supports the subdev internal routing API,
+		 * restrict the search to downstream entities reachable through
+		 * active routes.
+		 */
+
+		std::vector<const MediaPad *> srcPads;
+		bool supportsRouting = false;
+
+		if (sinkPad) {
+			srcPads = routedSourcePads(sinkPad);
+			if (!srcPads.empty())
+				supportsRouting = true;
+		}
+
+		if (srcPads.empty()) {
+			for (const MediaPad *pad : entity->pads()) {
+				if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
+					continue;
+				srcPads.push_back(pad);
+			}
+		}
+
+		for (const MediaPad *srcPad : srcPads) {
+			for (MediaLink *link : srcPad->links()) {
+				MediaEntity *next = link->sink()->entity();
+				if (visited.find(next) == visited.end()) {
+					queue.push({ next, link->sink() });
+
+					Entity e{ entity, supportsRouting, sinkPad,
+						  srcPad, link };
+					parents.insert({ next, e });
+				}
+			}
+		}
+	}
+
+	if (!target) {
+		LOG(MediaPipeline, Error) << "Failed to connect " << source->name();
+		return -ENOLINK;
+	}
+
+	/*
+	 * With the parents, we can follow back our way from the capture device
+	 * to the sensor. Store all the entities in the pipeline, from the
+	 * camera sensor to the video node, in entities_.
+	 */
+	entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });
+
+	for (auto it = parents.find(entity); it != parents.end();
+	     it = parents.find(entity)) {
+		const Entity &e = it->second;
+		entities_.push_front(e);
+		entity = e.entity;
+	}
+
+	LOG(MediaPipeline, Info)
+		<< "Found pipeline: "
+		<< utils::join(entities_, " -> ",
+			       [](const Entity &e) {
+				       std::string s = "[";
+				       if (e.sink)
+					       s += std::to_string(e.sink->index()) + "|";
+				       s += e.entity->name();
+				       if (e.source)
+					       s += "|" + std::to_string(e.source->index());
+				       s += "]";
+				       return s;
+			       });
+
+	return 0;
+}
+
+/**
+ * \brief Initialise and enable all links through the MediaPipeline
+ * \return 0 on success, or a negative errno otherwise
+ */
+int MediaPipeline::initLinks()
+{
+	MediaLink *sinkLink = nullptr;
+	for (Entity &e : entities_) {
+		/* Sensor entities have no connected sink. */
+		if (!sinkLink) {
+			sinkLink = e.sourceLink;
+			continue;
+		}
+
+		if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
+			LOG(MediaPipeline, Debug) << "Enabling : " << *sinkLink;
+
+			int ret = sinkLink->setEnabled(true);
+			if (ret < 0)
+				return ret;
+		}
+
+		sinkLink = e.sourceLink;
+	}
+
+	return 0;
+}
+
+/**
+ * \brief Configure the entities of this MediaPipeline
+ * \param[in] sensor The configured CameraSensor to propogate
+ * \param[inout] format The format to propogate through the pipeline
+ *
+ * Propagate formats through each of the entities of the Pipeline, validating
+ * that each one was not adjusted by the driver from the desired format.
+ *
+ * The format is updated with the final accepted format of the last entity of
+ * the pipeline.
+ *
+ * \return 0 on success or a negative errno otherwise
+ */
+int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)
+{
+	int ret;
+
+	for (const Entity &e : entities_) {
+		/* The sensor is configured through the CameraSensor */
+		if (!e.sourceLink)
+			break;
+
+		MediaLink *link = e.sourceLink;
+		MediaPad *source = link->source();
+		MediaPad *sink = link->sink();
+
+		/* 'format' already contains the sensor configuration */
+		if (source->entity() != sensor->entity()) {
+			/* todo: Add MediaDevice cache to reduce FD pressure */
+			V4L2Subdevice subdev(source->entity());
+			ret = subdev.open();
+			if (ret)
+				return ret;
+
+			ret = subdev.getFormat(source->index(), format);
+			if (ret < 0)
+				return ret;
+		}
+
+		V4L2SubdeviceFormat sourceFormat = *format;
+		/* todo: Add MediaDevice cache to reduce FD pressure */
+		V4L2Subdevice subdev(sink->entity());
+		ret = subdev.open();
+		if (ret)
+			return ret;
+
+		ret = subdev.setFormat(sink->index(), format);
+		if (ret < 0)
+			return ret;
+
+		if (format->code != sourceFormat.code ||
+		    format->size != sourceFormat.size) {
+			LOG(MediaPipeline, Debug)
+				<< "Source '" << *source
+				<< " produces " << sourceFormat
+				<< ", sink '" << *sink
+				<< " requires " << format;
+			return -EINVAL;
+		}
+
+		LOG(MediaPipeline, Debug)
+			<< "Link " << *link << " configured with format "
+			<< format;
+	}
+
+	return 0;
+}
+
+} /* namespace libcamera */
diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
index aa9ab0291854..2c0f8603b231 100644
--- a/src/libcamera/meson.build
+++ b/src/libcamera/meson.build
@@ -41,6 +41,7 @@  libcamera_internal_sources = files([
     'mapped_framebuffer.cpp',
     'media_device.cpp',
     'media_object.cpp',
+    'media_pipeline.cpp',
     'pipeline_handler.cpp',
     'process.cpp',
     'pub_key.cpp',