[3/4] libcamera: internal: Add MediaPipeline helper

Message ID 20240916140241.47845-4-kieran.bingham@ideasonboard.com
State Superseded
Series
  • MediaPipeline: Complex input device support

Commit Message

Kieran Bingham Sept. 16, 2024, 2:02 p.m. UTC
Provide a MediaPipeline class to help identifying and managing pipelines across
a MediaDevice graph.

Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
---
 include/libcamera/internal/media_pipeline.h |  60 ++++
 include/libcamera/internal/meson.build      |   1 +
 src/libcamera/media_pipeline.cpp            | 301 ++++++++++++++++++++
 src/libcamera/meson.build                   |   1 +
 4 files changed, 363 insertions(+)
 create mode 100644 include/libcamera/internal/media_pipeline.h
 create mode 100644 src/libcamera/media_pipeline.cpp

Comments

Umang Jain Sept. 27, 2024, 3:55 p.m. UTC | #1
Hi Kieran,

Thank you for the patch.

On 16/09/24 7:32 pm, Kieran Bingham wrote:
> Provide a MediaPipeline class to help identifing and managing pipelines across
> a MediaDevice graph.
>
> Signed-off-by: Kieran Bingham<kieran.bingham@ideasonboard.com>
> ---
>   include/libcamera/internal/media_pipeline.h |  60 ++++
>   include/libcamera/internal/meson.build      |   1 +
>   src/libcamera/media_pipeline.cpp            | 301 ++++++++++++++++++++
>   src/libcamera/meson.build                   |   1 +
>   4 files changed, 363 insertions(+)
>   create mode 100644 include/libcamera/internal/media_pipeline.h
>   create mode 100644 src/libcamera/media_pipeline.cpp
>
> diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h
> new file mode 100644
> index 000000000000..be8a611e20ca
> --- /dev/null
> +++ b/include/libcamera/internal/media_pipeline.h
> @@ -0,0 +1,60 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2024, Ideas on Board Oy
> + *
> + * Media pipeline handler
> + */
> +
> +#pragma once
> +
> +#include <list>
> +#include <string>
> +
> +#include <libcamera/base/log.h>
> +
> +namespace libcamera {
> +
> +class CameraSensor;
> +class MediaEntity;
> +class MediaLink;
> +class MediaPad;
> +struct V4L2SubdeviceFormat;
> +
> +class MediaPipeline
> +{
> +public:
> +	int init(MediaEntity *source, std::string sink);
> +	int initLinks();
> +	int configure(CameraSensor *sensor, V4L2SubdeviceFormat *);
> +
> +private:
> +	struct Entity {
> +		/* The media entity, always valid. */
> +		MediaEntity *entity;
> +		/*
> +		 * Whether or not the entity is a subdev that supports the
> +		 * routing API.
> +		 */
> +		bool supportsRouting;
> +		/*
> +		 * The local sink pad connected to the upstream entity, null for
> +		 * the camera sensor at the beginning of the pipeline.
> +		 */
> +		const MediaPad *sink;
> +		/*
> +		 * The local source pad connected to the downstream entity, null
> +		 * for the video node at the end of the pipeline.
> +		 */
> +		const MediaPad *source;
> +		/*
> +		 * The link on the source pad, to the downstream entity, null
> +		 * for the video node at the end of the pipeline.
> +		 */
> +		MediaLink *sourceLink;
> +	};
> +
> +	std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
> +	std::list<Entity> entities_;
> +};
> +
> +} /* namespace libcamera */
> diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build
> index 1c5eef9cab80..60a35d3e0357 100644
> --- a/include/libcamera/internal/meson.build
> +++ b/include/libcamera/internal/meson.build
> @@ -30,6 +30,7 @@ libcamera_internal_headers = files([
>       'mapped_framebuffer.h',
>       'media_device.h',
>       'media_object.h',
> +    'media_pipeline.h',
>       'pipeline_handler.h',
>       'process.h',
>       'pub_key.h',
> diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp
> new file mode 100644
> index 000000000000..e14cc1ff4773
> --- /dev/null
> +++ b/src/libcamera/media_pipeline.cpp
> @@ -0,0 +1,301 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2024, Ideas on Board Oy
> + *
> + * Media pipeline support
> + */
> +
> +#include "libcamera/internal/media_pipeline.h"
> +
> +#include <algorithm>
> +#include <errno.h>
> +#include <queue>
> +#include <string>
> +#include <unordered_map>
> +#include <unordered_set>
> +#include <vector>
> +
> +#include <linux/media.h>
> +
> +#include <libcamera/base/log.h>
> +
> +#include "libcamera/internal/camera_sensor.h"
> +#include "libcamera/internal/media_device.h"
> +#include "libcamera/internal/media_object.h"
> +#include "libcamera/internal/v4l2_subdevice.h"
> +
> +/**
> + * \file media_pipeline.h
> + * \brief Provide a representation of a pipeline of devices using the Media
> + * Controller.
> + */
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(MediaPipeline)
> +
> +/**
> + * \class MediaPipeline
> + * \brief The MediaPipeline represents a set of entities that together form a
> + * data path for stream data.
> + *
> + * A MediaPipeline instance is constructed from a sink and a source between
> + * two entities in a media graph.
> + */
> +
> +/**
> + * \brief Retrieve all source pads connected to a sink pad through active routes
> + *
> + * Examine the entity using the V4L2 Subdevice Routing API to collect all the
> + * source pads which are connected with an active route to the sink pad.
> + *
> + * \return A vector of source MediaPads
> + */
> +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)
> +{
> +	MediaEntity *entity = sink->entity();
> +	std::unique_ptr<V4L2Subdevice> subdev =
> +		std::make_unique<V4L2Subdevice>(entity);
> +
> +	int ret = subdev->open();
> +	if (ret < 0)
> +		return {};
> +
> +	V4L2Subdevice::Routing routing = {};
> +	ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);
> +	if (ret < 0)
> +		return {};
> +
> +	std::vector<const MediaPad *> pads;
> +
> +	for (const V4L2Subdevice::Route &route : routing) {
> +		if (sink->index() != route.sink.pad ||
> +		    !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))
> +			continue;
> +
> +		const MediaPad *pad = entity->getPadByIndex(route.source.pad);
> +		if (!pad) {
> +			LOG(MediaPipeline, Warning)
> +				<< "Entity " << entity->name()
> +				<< " has invalid route source pad "
> +				<< route.source.pad;
> +		}
> +
> +		pads.push_back(pad);
> +	}
> +
> +	return pads;
> +}
> +
> +/**
> + * \brief Find the path from source to sink
> + *
> + * Starting from a source entity, deteremine the shortest path to the

s/deteremine/determine
> + * target described by sink.
> + *
> + * If sink can not be found, or a route from source to sink can not be achieved
> + * an error of -ENOLINK will be returned.
> + *
> + * When successful, the MediaPipeline will internally store the representation
> + * of entities and links to describe the path between the two entities.
> + *
> + * \return 0 on success, a negative errno otherwise
> + */
> +int MediaPipeline::init(MediaEntity *source, std::string sink)
> +{
> +	/*
> +	 * Find the shortest path between from the Camera Sensor and the
> +	 * target entity.
> +	 */
> +	std::unordered_set<MediaEntity *> visited;
> +	std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;
> +
> +	/* Remember at each entity where we came from. */
> +	std::unordered_map<MediaEntity *, Entity> parents;
> +	MediaEntity *entity = nullptr;
> +	MediaEntity *target = nullptr;
> +	MediaPad *sinkPad;
> +
> +	queue.push({ source, nullptr });
> +
> +	while (!queue.empty()) {
> +		std::tie(entity, sinkPad) = queue.front();
> +		queue.pop();
> +
> +		/* Found the target device. */
> +		if (entity->name() == sink) {
> +			LOG(MediaPipeline, Debug)
> +				<< "Found Pipeline target " << entity->name();
> +			target = entity;
> +			break;
> +		}
> +
> +		visited.insert(entity);
> +
> +		/*
> +		 * Add direct downstream entities to the search queue. If the
> +		 * current entity supports the subdev internal routing API,
> +		 * restrict the search to downstream entities reachable through
> +		 * active routes.
> +		 */
> +
> +		std::vector<const MediaPad *> pads;

all these are source pads, so I would suggest `srcPads`
> +		bool supportsRouting = false;
> +
> +		if (sinkPad) {
> +			pads = routedSourcePads(sinkPad);
> +			if (!pads.empty())
> +				supportsRouting = true;
> +		}
> +
> +		if (pads.empty()) {
> +			for (const MediaPad *pad : entity->pads()) {
> +				if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
> +					continue;
> +				pads.push_back(pad);
> +			}
> +		}
> +
> +		for (const MediaPad *pad : pads) {

hence, I would suggest

+		for (const MediaPad *srcPad : srcPads) {

> +			for (MediaLink *link : pad->links()) {
> +				MediaEntity *next = link->sink()->entity();
> +				if (visited.find(next) == visited.end()) {
> +					queue.push({ next, link->sink() });
> +
> +					Entity e{ entity, supportsRouting, sinkPad, pad, link };
> +					parents.insert({ next, e });
> +				}
> +			}
> +		}
> +	}
> +
> +	if (!target) {
> +		LOG(MediaPipeline, Error) << "Failed to connect " << source->name();
> +		return -ENOLINK;
> +	}
> +
> +	/*
> +	 * With the parents, we can follow back our way from the capture device
> +	 * to the sensor. Store all the entities in the pipeline, from the
> +	 * camera sensor to the video node, in entities_.
> +	 */
> +	entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });
> +
> +	for (auto it = parents.find(entity); it != parents.end();
> +	     it = parents.find(entity)) {
> +		const Entity &e = it->second;
> +		entities_.push_front(e);
> +		entity = e.entity;
> +	}
> +
> +	LOG(MediaPipeline, Info)
> +		<< "Found pipeline: "
> +		<< utils::join(entities_, " -> ",
> +			       [](const Entity &e) {
> +				       std::string s = "[";
> +				       if (e.sink)
> +					       s += std::to_string(e.sink->index()) + "|";
> +				       s += e.entity->name();
> +				       if (e.source)
> +					       s += "|" + std::to_string(e.source->index());
> +				       s += "]";
> +				       return s;
> +			       });
> +
> +	return 0;
> +}
> +
> +/**
> + * \brief Initialise and enable all links through the MediaPipeline
> + * \return 0 on success, or a negative errno otherwise
> + */
> +int MediaPipeline::initLinks()
> +{
> +	int ret = 0;
> +
> +	MediaLink *sinkLink = nullptr;
> +	for (Entity &e : entities_) {
> +		/* Sensor entities have no connected sink. */
> +		if (!sinkLink) {
> +			sinkLink = e.sourceLink;
> +			continue;
> +		}
> +
> +		LOG(MediaPipeline, Debug) << "Enabling : " << *sinkLink;

Should this be logged inside the `if` block below, possibly after
setEnabled() is successful ?

Otherwise, this will be logged for all sinkLink(s) which are already 
MEDIA_LNK_FL_ENABLED
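
Something like this is what I had in mind (untested sketch, the rest of
the loop unchanged):

        if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
                ret = sinkLink->setEnabled(true);
                if (ret < 0)
                        return ret;

                /* Report only the links we actually changed. */
                LOG(MediaPipeline, Debug) << "Enabled : " << *sinkLink;
        }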
> +
> +		if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
> +			ret = sinkLink->setEnabled(true);
> +			if (ret < 0)
> +				return ret;
> +		}
> +
> +		sinkLink = e.sourceLink;
> +	}
> +
> +	return ret;
> +}
> +
> +/**
> + * \brief Configure the entities of this MediaPipeline
> + *
> + * Propagate formats through each of the entities of the Pipeline, validating
> + * that each one was not adjusted by the driver from the desired format.
> + *
> + * \return 0 on success or a negative errno otherwise
> + */
> +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)
> +{
> +	int ret;
> +
> +	for (const Entity &e : entities_) {
> +		/* The sensor is configured through the CameraSensor */
> +		if (!e.sourceLink)
> +			break;
> +
> +		MediaLink *link = e.sourceLink;
> +		MediaPad *source = link->source();
> +		MediaPad *sink = link->sink();
> +
> +		/* 'format' already contains the sensor configuration */

This is only true for the first iteration, 'format' will get updated as 
you traverse down the pipeline, no ?

This comment either needs an update or should be removed.
> +		if (source->entity() != sensor->entity()) {
> +			/* Todo: Add MediaDevice cache to reduce FD pressure */
> +			V4L2Subdevice subdev(source->entity());
> +			ret = subdev.open();
> +			if (ret)
> +				return ret;
> +
> +			ret = subdev.getFormat(source->index(), format);

I feel a bit uncomfortable here. 'format' is a sensor format and can get 
updated here ?
> +			if (ret < 0)
> +				return ret;
> +		}
> +
> +		V4L2SubdeviceFormat sourceFormat = *format;
> +		/* Todo: Add MediaDevice cache to reduce FD pressure */
> +		V4L2Subdevice subdev(sink->entity());
> +		ret = subdev.open();
> +		if (ret)
> +			return ret;
> +
> +		ret = subdev.setFormat(sink->index(), format);

and can get updated here as well ? How will that be reflected to the
caller of the function, if the sensor format pointer gets updated here ?

I think the scope for 'format' should be local to this function. Both 
the parameters passed to the function should become const.
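
Something like this, roughly (sketch only, 'sensorFormat' is just a name
I made up for the incoming parameter):

        int MediaPipeline::configure(const CameraSensor *sensor,
                                     const V4L2SubdeviceFormat *sensorFormat)
        {
                /* Work on a local copy; the caller's format stays untouched. */
                V4L2SubdeviceFormat format = *sensorFormat;

                /* ... existing loop, propagating &format instead ... */

                return 0;
        }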
> +		if (ret < 0)
> +			return ret;
> +
> +		if (format->code != sourceFormat.code ||
> +		    format->size != sourceFormat.size) {
> +			LOG(MediaPipeline, Debug)
> +				<< "Source '" << *source
> +				<< " produces " << sourceFormat
> +				<< ", sink '" << *sink
> +				<< " requires " << format;
> +			return -EINVAL;
> +		}
> +
> +		LOG(MediaPipeline, Debug)
> +			<< "Link " << *link << " configured with format "
> +			<< format;
> +	}
> +
> +	return 0;
> +}
> +
> +} /* namespace libcamera */
> diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
> index aa9ab0291854..2c0f8603b231 100644
> --- a/src/libcamera/meson.build
> +++ b/src/libcamera/meson.build
> @@ -41,6 +41,7 @@ libcamera_internal_sources = files([
>       'mapped_framebuffer.cpp',
>       'media_device.cpp',
>       'media_object.cpp',
> +    'media_pipeline.cpp',
>       'pipeline_handler.cpp',
>       'process.cpp',
>       'pub_key.cpp',
Jacopo Mondi Sept. 27, 2024, 8:40 p.m. UTC | #2
Hi Kieran

On Mon, Sep 16, 2024 at 04:02:40PM GMT, Kieran Bingham wrote:
> Provide a MediaPipeline class to help identifing and managing pipelines across
> a MediaDevice graph.
>
> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
> ---
>  include/libcamera/internal/media_pipeline.h |  60 ++++
>  include/libcamera/internal/meson.build      |   1 +
>  src/libcamera/media_pipeline.cpp            | 301 ++++++++++++++++++++
>  src/libcamera/meson.build                   |   1 +
>  4 files changed, 363 insertions(+)
>  create mode 100644 include/libcamera/internal/media_pipeline.h
>  create mode 100644 src/libcamera/media_pipeline.cpp
>
> diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h
> new file mode 100644
> index 000000000000..be8a611e20ca
> --- /dev/null
> +++ b/include/libcamera/internal/media_pipeline.h
> @@ -0,0 +1,60 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2024, Ideas on Board Oy
> + *
> + * Media pipeline handler
> + */
> +
> +#pragma once
> +
> +#include <list>
> +#include <string>

missing <vector>

> +
> +#include <libcamera/base/log.h>

What for ?

> +
> +namespace libcamera {
> +
> +class CameraSensor;
> +class MediaEntity;
> +class MediaLink;
> +class MediaPad;
> +struct V4L2SubdeviceFormat;
> +
> +class MediaPipeline
> +{
> +public:
> +	int init(MediaEntity *source, std::string sink);
> +	int initLinks();
> +	int configure(CameraSensor *sensor, V4L2SubdeviceFormat *);
> +
> +private:
> +	struct Entity {
> +		/* The media entity, always valid. */
> +		MediaEntity *entity;
> +		/*
> +		 * Whether or not the entity is a subdev that supports the
> +		 * routing API.
> +		 */
> +		bool supportsRouting;
> +		/*
> +		 * The local sink pad connected to the upstream entity, null for
> +		 * the camera sensor at the beginning of the pipeline.
> +		 */
> +		const MediaPad *sink;
> +		/*
> +		 * The local source pad connected to the downstream entity, null
> +		 * for the video node at the end of the pipeline.
> +		 */
> +		const MediaPad *source;
> +		/*
> +		 * The link on the source pad, to the downstream entity, null
> +		 * for the video node at the end of the pipeline.
> +		 */
> +		MediaLink *sourceLink;
> +	};
> +
> +	std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
> +	std::list<Entity> entities_;
> +};
> +
> +} /* namespace libcamera */
> diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build
> index 1c5eef9cab80..60a35d3e0357 100644
> --- a/include/libcamera/internal/meson.build
> +++ b/include/libcamera/internal/meson.build
> @@ -30,6 +30,7 @@ libcamera_internal_headers = files([
>      'mapped_framebuffer.h',
>      'media_device.h',
>      'media_object.h',
> +    'media_pipeline.h',
>      'pipeline_handler.h',
>      'process.h',
>      'pub_key.h',
> diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp
> new file mode 100644
> index 000000000000..e14cc1ff4773
> --- /dev/null
> +++ b/src/libcamera/media_pipeline.cpp
> @@ -0,0 +1,301 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2024, Ideas on Board Oy
> + *
> + * Media pipeline support
> + */
> +
> +#include "libcamera/internal/media_pipeline.h"
> +
> +#include <algorithm>
> +#include <errno.h>

missing <memory> for unique_ptr<>
missing <tuple> for std::tuple and std::tie

> +#include <queue>
> +#include <string>

Included in the header

> +#include <unordered_map>
> +#include <unordered_set>
> +#include <vector>

Will be included in the header

> +
> +#include <linux/media.h>
> +
> +#include <libcamera/base/log.h>
> +
> +#include "libcamera/internal/camera_sensor.h"
> +#include "libcamera/internal/media_device.h"
> +#include "libcamera/internal/media_object.h"
> +#include "libcamera/internal/v4l2_subdevice.h"
> +
> +/**
> + * \file media_pipeline.h
> + * \brief Provide a representation of a pipeline of devices using the Media
> + * Controller.

no '.' at the end of briefs

> + */
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(MediaPipeline)
> +
> +/**
> + * \class MediaPipeline
> + * \brief The MediaPipeline represents a set of entities that together form a
> + * data path for stream data.

no '.' at the end of briefs

what a "stream data" ? Should it be "data stream" ?

> + *
> + * A MediaPipeline instance is constructed from a sink and a source between
> + * two entities in a media graph.

Doesn't a pipeline span the whole media graph rather than just
between two entities ?

> + */
> +
> +/**
> + * \brief Retrieve all source pads connected to a sink pad through active routes

\param[in] sink

I wonder why I don't get a warning from doxygen..

> + *
> + * Examine the entity using the V4L2 Subdevice Routing API to collect all the
> + * source pads which are connected with an active route to the sink pad.
> + *
> + * \return A vector of source MediaPads
> + */
> +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)
> +{
> +	MediaEntity *entity = sink->entity();
> +	std::unique_ptr<V4L2Subdevice> subdev =
> +		std::make_unique<V4L2Subdevice>(entity);
> +
> +	int ret = subdev->open();
> +	if (ret < 0)
> +		return {};
> +
> +	V4L2Subdevice::Routing routing = {};
> +	ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);
> +	if (ret < 0)
> +		return {};
> +
> +	std::vector<const MediaPad *> pads;
> +
> +	for (const V4L2Subdevice::Route &route : routing) {
> +		if (sink->index() != route.sink.pad ||
> +		    !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))
> +			continue;
> +
> +		const MediaPad *pad = entity->getPadByIndex(route.source.pad);
> +		if (!pad) {
> +			LOG(MediaPipeline, Warning)
> +				<< "Entity " << entity->name()
> +				<< " has invalid route source pad "
> +				<< route.source.pad;
> +		}
> +
> +		pads.push_back(pad);
> +	}
> +
> +	return pads;
> +}
> +
> +/**
> + * \brief Find the path from source to sink

missing \param

> + *
> + * Starting from a source entity, deteremine the shortest path to the

s/deteremine/determine

> + * target described by sink.

s/target described by// ?

> + *
> + * If sink can not be found, or a route from source to sink can not be achieved
> + * an error of -ENOLINK will be returned.
> + *
> + * When successful, the MediaPipeline will internally store the representation
> + * of entities and links to describe the path between the two entities.
> + *
> + * \return 0 on success, a negative errno otherwise
> + */
> +int MediaPipeline::init(MediaEntity *source, std::string sink)
> +{
> +	/*
> +	 * Find the shortest path between from the Camera Sensor and the
> +	 * target entity.

I know where this comment comes from, but this is not a CameraSensor,
maybe just use "source" ?

> +	 */
> +	std::unordered_set<MediaEntity *> visited;
> +	std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;
> +
> +	/* Remember at each entity where we came from. */
> +	std::unordered_map<MediaEntity *, Entity> parents;
> +	MediaEntity *entity = nullptr;
> +	MediaEntity *target = nullptr;
> +	MediaPad *sinkPad;
> +
> +	queue.push({ source, nullptr });
> +
> +	while (!queue.empty()) {
> +		std::tie(entity, sinkPad) = queue.front();
> +		queue.pop();
> +
> +		/* Found the target device. */
> +		if (entity->name() == sink) {
> +			LOG(MediaPipeline, Debug)
> +				<< "Found Pipeline target " << entity->name();

s/Pipeline/pipeline

> +			target = entity;
> +			break;
> +		}
> +
> +		visited.insert(entity);
> +
> +		/*
> +		 * Add direct downstream entities to the search queue. If the
> +		 * current entity supports the subdev internal routing API,
> +		 * restrict the search to downstream entities reachable through
> +		 * active routes.
> +		 */
> +
> +		std::vector<const MediaPad *> pads;
> +		bool supportsRouting = false;
> +
> +		if (sinkPad) {
> +			pads = routedSourcePads(sinkPad);
> +			if (!pads.empty())
> +				supportsRouting = true;
> +		}
> +
> +		if (pads.empty()) {
> +			for (const MediaPad *pad : entity->pads()) {
> +				if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
> +					continue;
> +				pads.push_back(pad);
> +			}
> +		}
> +
> +		for (const MediaPad *pad : pads) {
> +			for (MediaLink *link : pad->links()) {
> +				MediaEntity *next = link->sink()->entity();
> +				if (visited.find(next) == visited.end()) {
> +					queue.push({ next, link->sink() });
> +
> +					Entity e{ entity, supportsRouting, sinkPad, pad, link };
> +					parents.insert({ next, e });
> +				}
> +			}
> +		}
> +	}
> +
> +	if (!target) {
> +		LOG(MediaPipeline, Error) << "Failed to connect " << source->name();
> +		return -ENOLINK;
> +	}
> +
> +	/*
> +	 * With the parents, we can follow back our way from the capture device
> +	 * to the sensor. Store all the entities in the pipeline, from the
> +	 * camera sensor to the video node, in entities_.
> +	 */
> +	entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });
> +
> +	for (auto it = parents.find(entity); it != parents.end();
> +	     it = parents.find(entity)) {
> +		const Entity &e = it->second;
> +		entities_.push_front(e);
> +		entity = e.entity;
> +	}

I'll skip review assuming this is a copy of the implementation in
simple.cpp

> +
> +	LOG(MediaPipeline, Info)
> +		<< "Found pipeline: "
> +		<< utils::join(entities_, " -> ",
> +			       [](const Entity &e) {
> +				       std::string s = "[";
> +				       if (e.sink)
> +					       s += std::to_string(e.sink->index()) + "|";
> +				       s += e.entity->name();
> +				       if (e.source)
> +					       s += "|" + std::to_string(e.source->index());
> +				       s += "]";
> +				       return s;
> +			       });
> +
> +	return 0;
> +}
> +
> +/**
> + * \brief Initialise and enable all links through the MediaPipeline
> + * \return 0 on success, or a negative errno otherwise
> + */
> +int MediaPipeline::initLinks()
> +{
> +	int ret = 0;
> +
> +	MediaLink *sinkLink = nullptr;
> +	for (Entity &e : entities_) {
> +		/* Sensor entities have no connected sink. */
> +		if (!sinkLink) {
> +			sinkLink = e.sourceLink;
> +			continue;
> +		}
> +
> +		LOG(MediaPipeline, Debug) << "Enabling : " << *sinkLink;
> +
> +		if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
> +			ret = sinkLink->setEnabled(true);
> +			if (ret < 0)
> +				return ret;
> +		}
> +
> +		sinkLink = e.sourceLink;
> +	}
> +
> +	return ret;

I presume you can return 0 and declare ret inside the above loop

> +}
> +
> +/**
> + * \brief Configure the entities of this MediaPipeline

missing \params

> + *
> + * Propagate formats through each of the entities of the Pipeline, validating
> + * that each one was not adjusted by the driver from the desired format.
> + *
> + * \return 0 on success or a negative errno otherwise
> + */
> +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)
> +{
> +	int ret;
> +
> +	for (const Entity &e : entities_) {
> +		/* The sensor is configured through the CameraSensor */
> +		if (!e.sourceLink)
> +			break;
> +
> +		MediaLink *link = e.sourceLink;
> +		MediaPad *source = link->source();
> +		MediaPad *sink = link->sink();
> +
> +		/* 'format' already contains the sensor configuration */
> +		if (source->entity() != sensor->entity()) {
> +			/* Todo: Add MediaDevice cache to reduce FD pressure */

s/Todo/\todo/

> +			V4L2Subdevice subdev(source->entity());
> +			ret = subdev.open();

Trying to open a subdev multiple times returns -EBUSY; if this function
is meant to be used by pipeline handlers, they should be careful to
avoid contention.

> +			if (ret)
> +				return ret;
> +
> +			ret = subdev.getFormat(source->index(), format);
> +			if (ret < 0)
> +				return ret;
> +		}
> +
> +		V4L2SubdeviceFormat sourceFormat = *format;
> +		/* Todo: Add MediaDevice cache to reduce FD pressure */

s/Todo/\todo/

> +		V4L2Subdevice subdev(sink->entity());
> +		ret = subdev.open();
> +		if (ret)
> +			return ret;
> +
> +		ret = subdev.setFormat(sink->index(), format);
> +		if (ret < 0)
> +			return ret;
> +
> +		if (format->code != sourceFormat.code ||
> +		    format->size != sourceFormat.size) {
> +			LOG(MediaPipeline, Debug)
> +				<< "Source '" << *source
> +				<< " produces " << sourceFormat
> +				<< ", sink '" << *sink
> +				<< " requires " << format;
> +			return -EINVAL;
> +		}
> +
> +		LOG(MediaPipeline, Debug)
> +			<< "Link " << *link << " configured with format "
> +			<< format;
> +	}
> +
> +	return 0;
> +}
> +
> +} /* namespace libcamera */
> diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
> index aa9ab0291854..2c0f8603b231 100644
> --- a/src/libcamera/meson.build
> +++ b/src/libcamera/meson.build
> @@ -41,6 +41,7 @@ libcamera_internal_sources = files([
>      'mapped_framebuffer.cpp',
>      'media_device.cpp',
>      'media_object.cpp',
> +    'media_pipeline.cpp',
>      'pipeline_handler.cpp',
>      'process.cpp',
>      'pub_key.cpp',
> --
> 2.46.0
>
Kieran Bingham Oct. 2, 2024, 2:58 p.m. UTC | #3
Quoting Umang Jain (2024-09-27 16:55:38)
> 
> Hi Kieran,
> 
> Thank you for the patch.
> 
> On 16/09/24 7:32 pm, Kieran Bingham wrote:
> > Provide a MediaPipeline class to help identifing and managing pipelines across
> > a MediaDevice graph.
> >
> > Signed-off-by: Kieran Bingham<kieran.bingham@ideasonboard.com>
> > ---
> >   include/libcamera/internal/media_pipeline.h |  60 ++++
> >   include/libcamera/internal/meson.build      |   1 +
> >   src/libcamera/media_pipeline.cpp            | 301 ++++++++++++++++++++
> >   src/libcamera/meson.build                   |   1 +
> >   4 files changed, 363 insertions(+)
> >   create mode 100644 include/libcamera/internal/media_pipeline.h
> >   create mode 100644 src/libcamera/media_pipeline.cpp
> >
> > diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h
> > new file mode 100644
> > index 000000000000..be8a611e20ca
> > --- /dev/null
> > +++ b/include/libcamera/internal/media_pipeline.h
> > @@ -0,0 +1,60 @@
> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> > +/*
> > + * Copyright (C) 2024, Ideas on Board Oy
> > + *
> > + * Media pipeline handler
> > + */
> > +
> > +#pragma once
> > +
> > +#include <list>
> > +#include <string>
> > +
> > +#include <libcamera/base/log.h>
> > +
> > +namespace libcamera {
> > +
> > +class CameraSensor;
> > +class MediaEntity;
> > +class MediaLink;
> > +class MediaPad;
> > +struct V4L2SubdeviceFormat;
> > +
> > +class MediaPipeline
> > +{
> > +public:
> > +     int init(MediaEntity *source, std::string sink);
> > +     int initLinks();
> > +     int configure(CameraSensor *sensor, V4L2SubdeviceFormat *);
> > +
> > +private:
> > +     struct Entity {
> > +             /* The media entity, always valid. */
> > +             MediaEntity *entity;
> > +             /*
> > +              * Whether or not the entity is a subdev that supports the
> > +              * routing API.
> > +              */
> > +             bool supportsRouting;
> > +             /*
> > +              * The local sink pad connected to the upstream entity, null for
> > +              * the camera sensor at the beginning of the pipeline.
> > +              */
> > +             const MediaPad *sink;
> > +             /*
> > +              * The local source pad connected to the downstream entity, null
> > +              * for the video node at the end of the pipeline.
> > +              */
> > +             const MediaPad *source;
> > +             /*
> > +              * The link on the source pad, to the downstream entity, null
> > +              * for the video node at the end of the pipeline.
> > +              */
> > +             MediaLink *sourceLink;
> > +     };
> > +
> > +     std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
> > +     std::list<Entity> entities_;
> > +};
> > +
> > +} /* namespace libcamera */
> > diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build
> > index 1c5eef9cab80..60a35d3e0357 100644
> > --- a/include/libcamera/internal/meson.build
> > +++ b/include/libcamera/internal/meson.build
> > @@ -30,6 +30,7 @@ libcamera_internal_headers = files([
> >       'mapped_framebuffer.h',
> >       'media_device.h',
> >       'media_object.h',
> > +    'media_pipeline.h',
> >       'pipeline_handler.h',
> >       'process.h',
> >       'pub_key.h',
> > diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp
> > new file mode 100644
> > index 000000000000..e14cc1ff4773
> > --- /dev/null
> > +++ b/src/libcamera/media_pipeline.cpp
> > @@ -0,0 +1,301 @@
> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> > +/*
> > + * Copyright (C) 2024, Ideas on Board Oy
> > + *
> > + * Media pipeline support
> > + */
> > +
> > +#include "libcamera/internal/media_pipeline.h"
> > +
> > +#include <algorithm>
> > +#include <errno.h>
> > +#include <queue>
> > +#include <string>
> > +#include <unordered_map>
> > +#include <unordered_set>
> > +#include <vector>
> > +
> > +#include <linux/media.h>
> > +
> > +#include <libcamera/base/log.h>
> > +
> > +#include "libcamera/internal/camera_sensor.h"
> > +#include "libcamera/internal/media_device.h"
> > +#include "libcamera/internal/media_object.h"
> > +#include "libcamera/internal/v4l2_subdevice.h"
> > +
> > +/**
> > + * \file media_pipeline.h
> > + * \brief Provide a representation of a pipeline of devices using the Media
> > + * Controller.
> > + */
> > +
> > +namespace libcamera {
> > +
> > +LOG_DEFINE_CATEGORY(MediaPipeline)
> > +
> > +/**
> > + * \class MediaPipeline
> > + * \brief The MediaPipeline represents a set of entities that together form a
> > + * data path for stream data.
> > + *
> > + * A MediaPipeline instance is constructed from a sink and a source between
> > + * two entities in a media graph.
> > + */
> > +
> > +/**
> > + * \brief Retrieve all source pads connected to a sink pad through active routes
> > + *
> > + * Examine the entity using the V4L2 Subdevice Routing API to collect all the
> > + * source pads which are connected with an active route to the sink pad.
> > + *
> > + * \return A vector of source MediaPads
> > + */
> > +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)
> > +{
> > +     MediaEntity *entity = sink->entity();
> > +     std::unique_ptr<V4L2Subdevice> subdev =
> > +             std::make_unique<V4L2Subdevice>(entity);
> > +
> > +     int ret = subdev->open();
> > +     if (ret < 0)
> > +             return {};
> > +
> > +     V4L2Subdevice::Routing routing = {};
> > +     ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);
> > +     if (ret < 0)
> > +             return {};
> > +
> > +     std::vector<const MediaPad *> pads;
> > +
> > +     for (const V4L2Subdevice::Route &route : routing) {
> > +             if (sink->index() != route.sink.pad ||
> > +                 !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))
> > +                     continue;
> > +
> > +             const MediaPad *pad = entity->getPadByIndex(route.source.pad);
> > +             if (!pad) {
> > +                     LOG(MediaPipeline, Warning)
> > +                             << "Entity " << entity->name()
> > +                             << " has invalid route source pad "
> > +                             << route.source.pad;
> > +             }
> > +
> > +             pads.push_back(pad);
> > +     }
> > +
> > +     return pads;
> > +}
> > +
> > +/**
> > + * \brief Find the path from source to sink
> > + *
> > + * Starting from a source entity, deteremine the shortest path to the
> 
> s/deteremine/determine

Ack.

> > + * target described by sink.
> > + *
> > + * If sink can not be found, or a route from source to sink can not be achieved
> > + * an error of -ENOLINK will be returned.
> > + *
> > + * When successful, the MediaPipeline will internally store the representation
> > + * of entities and links to describe the path between the two entities.
> > + *
> > + * \return 0 on success, a negative errno otherwise
> > + */
> > +int MediaPipeline::init(MediaEntity *source, std::string sink)
> > +{
> > +     /*
> > +      * Find the shortest path between from the Camera Sensor and the
> > +      * target entity.
> > +      */
> > +     std::unordered_set<MediaEntity *> visited;
> > +     std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;
> > +
> > +     /* Remember at each entity where we came from. */
> > +     std::unordered_map<MediaEntity *, Entity> parents;
> > +     MediaEntity *entity = nullptr;
> > +     MediaEntity *target = nullptr;
> > +     MediaPad *sinkPad;
> > +
> > +     queue.push({ source, nullptr });
> > +
> > +     while (!queue.empty()) {
> > +             std::tie(entity, sinkPad) = queue.front();
> > +             queue.pop();
> > +
> > +             /* Found the target device. */
> > +             if (entity->name() == sink) {
> > +                     LOG(MediaPipeline, Debug)
> > +                             << "Found Pipeline target " << entity->name();
> > +                     target = entity;
> > +                     break;
> > +             }
> > +
> > +             visited.insert(entity);
> > +
> > +             /*
> > +              * Add direct downstream entities to the search queue. If the
> > +              * current entity supports the subdev internal routing API,
> > +              * restrict the search to downstream entities reachable through
> > +              * active routes.
> > +              */
> > +
> > +             std::vector<const MediaPad *> pads;
> 
> all these are source pads, so I would suggest `srcPads`

Ack

> > +             bool supportsRouting = false;
> > +
> > +             if (sinkPad) {
> > +                     pads = routedSourcePads(sinkPad);
> > +                     if (!pads.empty())
> > +                             supportsRouting = true;
> > +             }
> > +
> > +             if (pads.empty()) {
> > +                     for (const MediaPad *pad : entity->pads()) {
> > +                             if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
> > +                                     continue;
> > +                             pads.push_back(pad);
> > +                     }
> > +             }
> > +
> > +             for (const MediaPad *pad : pads) {
> 
> hence, I would suggest
> 
> +               for (const MediaPad *srcPad : srcPads) {

Ack

> > +                     for (MediaLink *link : pad->links()) {
> > +                             MediaEntity *next = link->sink()->entity();
> > +                             if (visited.find(next) == visited.end()) {
> > +                                     queue.push({ next, link->sink() });
> > +
> > +                                     Entity e{ entity, supportsRouting, sinkPad, pad, link };
> > +                                     parents.insert({ next, e });
> > +                             }
> > +                     }
> > +             }
> > +     }
> > +
> > +     if (!target) {
> > +             LOG(MediaPipeline, Error) << "Failed to connect " << source->name();
> > +             return -ENOLINK;
> > +     }
> > +
> > +     /*
> > +      * With the parents, we can follow back our way from the capture device
> > +      * to the sensor. Store all the entities in the pipeline, from the
> > +      * camera sensor to the video node, in entities_.
> > +      */
> > +     entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });
> > +
> > +     for (auto it = parents.find(entity); it != parents.end();
> > +          it = parents.find(entity)) {
> > +             const Entity &e = it->second;
> > +             entities_.push_front(e);
> > +             entity = e.entity;
> > +     }
> > +
> > +     LOG(MediaPipeline, Info)
> > +             << "Found pipeline: "
> > +             << utils::join(entities_, " -> ",
> > +                            [](const Entity &e) {
> > +                                    std::string s = "[";
> > +                                    if (e.sink)
> > +                                            s += std::to_string(e.sink->index()) + "|";
> > +                                    s += e.entity->name();
> > +                                    if (e.source)
> > +                                            s += "|" + std::to_string(e.source->index());
> > +                                    s += "]";
> > +                                    return s;
> > +                            });
> > +
> > +     return 0;
> > +}
> > +
> > +/**
> > + * \brief Initialise and enable all links through the MediaPipeline
> > + * \return 0 on success, or a negative errno otherwise
> > + */
> > +int MediaPipeline::initLinks()
> > +{
> > +     int ret = 0;
> > +
> > +     MediaLink *sinkLink = nullptr;
> > +     for (Entity &e : entities_) {
> > +             /* Sensor entities have no connected sink. */
> > +             if (!sinkLink) {
> > +                     sinkLink = e.sourceLink;
> > +                     continue;
> > +             }
> > +
> > +             LOG(MediaPipeline, Debug) << "Enabling : " << *sinkLink;
> 
> Should this be logged inside the `if` block below, possibly after
> setEnabled() is successful ?
> 
> Otherwise, this will be logged for all sinkLink(s) which are already 
> MEDIA_LNK_FL_ENABLED

I think it's ok to report it at debug level anyway, but I agree it's
clearer to only report links that it's actually making a change on.

I've moved it.

> > +
> > +             if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
> > +                     ret = sinkLink->setEnabled(true);
> > +                     if (ret < 0)
> > +                             return ret;
> > +             }
> > +
> > +             sinkLink = e.sourceLink;
> > +     }
> > +
> > +     return ret;
> > +}
> > +
> > +/**
> > + * \brief Configure the entities of this MediaPipeline
> > + *
> > + * Propagate formats through each of the entities of the Pipeline, validating
> > + * that each one was not adjusted by the driver from the desired format.
> > + *
> > + * \return 0 on success or a negative errno otherwise
> > + */
> > +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)
> > +{
> > +     int ret;
> > +
> > +     for (const Entity &e : entities_) {
> > +             /* The sensor is configured through the CameraSensor */
> > +             if (!e.sourceLink)
> > +                     break;
> > +
> > +             MediaLink *link = e.sourceLink;
> > +             MediaPad *source = link->source();
> > +             MediaPad *sink = link->sink();
> > +
> > +             /* 'format' already contains the sensor configuration */
> 
> This is only true for the first iteration, 'format' will get updated as 
> you traverse down the pipeline, no ?
> 
> This comment either needs an update or should be removed.

Correct - it's only on the first iteration.

> > +             if (source->entity() != sensor->entity()) {
> > +                     /* Todo: Add MediaDevice cache to reduce FD pressure */
> > +                     V4L2Subdevice subdev(source->entity());
> > +                     ret = subdev.open();
> > +                     if (ret)
> > +                             return ret;
> > +
> > +                     ret = subdev.getFormat(source->index(), format);
> 
> I feel a bit uncomfortable here. 'format' is a sensor format and can get 
> updated here ?

It's the format that we're propagating forwards through the pipeline.

It's ok for it to get changed, and desirable that it should tell the
caller what it finished upon at the end.

We can't return the final format configured on the end of the Pipeline
as we use the return value for errors.


> > +                     if (ret < 0)
> > +                             return ret;
> > +             }
> > +
> > +             V4L2SubdeviceFormat sourceFormat = *format;
> > +             /* Todo: Add MediaDevice cache to reduce FD pressure */
> > +             V4L2Subdevice subdev(sink->entity());
> > +             ret = subdev.open();
> > +             if (ret)
> > +                     return ret;
> > +
> > +             ret = subdev.setFormat(sink->index(), format);
> 
> and can get updated here as well ? How will that be reflected to the
> caller of the function, if the sensor format pointer gets updated here ?
> 
> I think the scope for 'format' should be local to this function. Both 
> the parameters passed to the function should become const.

The purpose of this is to help with format propagation from the start of
a pipeline to the end, so it's intentional.

I'll see if I can make that clearer in the function documentation or
commit messages or class documentation.
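
Roughly, the usage I have in mind from a pipeline handler is (sketch
only; 'pipe', 'sensor' and 'sensorFormat' are placeholder names):

        /*
         * 'format' is in/out: it goes in as the format produced by the
         * sensor and comes back out as the format that reached the end
         * of the pipeline.
         */
        V4L2SubdeviceFormat format = sensorFormat;

        int ret = pipe.configure(sensor, &format);
        if (ret < 0)
                return ret;

        /* 'format' now describes what the last link was configured with. */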



> > +             if (ret < 0)
> > +                     return ret;
> > +
> > +             if (format->code != sourceFormat.code ||
> > +                 format->size != sourceFormat.size) {
> > +                     LOG(MediaPipeline, Debug)
> > +                             << "Source '" << *source
> > +                             << " produces " << sourceFormat
> > +                             << ", sink '" << *sink
> > +                             << " requires " << format;
> > +                     return -EINVAL;
> > +             }
> > +
> > +             LOG(MediaPipeline, Debug)
> > +                     << "Link " << *link << " configured with format "
> > +                     << format;
> > +     }
> > +
> > +     return 0;
> > +}
> > +
> > +} /* namespace libcamera */
> > diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
> > index aa9ab0291854..2c0f8603b231 100644
> > --- a/src/libcamera/meson.build
> > +++ b/src/libcamera/meson.build
> > @@ -41,6 +41,7 @@ libcamera_internal_sources = files([
> >       'mapped_framebuffer.cpp',
> >       'media_device.cpp',
> >       'media_object.cpp',
> > +    'media_pipeline.cpp',
> >       'pipeline_handler.cpp',
> >       'process.cpp',
> >       'pub_key.cpp',
>
Kieran Bingham Oct. 3, 2024, 3:21 p.m. UTC | #4
I just found this unsent response which I wrote while updating the patch
for v2.

I've already sent v2, but now sending this (late) for posterity... and
to perhaps pretend that I did answer questions before I sent v2 ... :-)

--
Kieran

Quoting Jacopo Mondi (2024-09-27 21:40:27)
> Hi Kieran
> 
> On Mon, Sep 16, 2024 at 04:02:40PM GMT, Kieran Bingham wrote:
> > Provide a MediaPipeline class to help identifing and managing pipelines across
> > a MediaDevice graph.
> >
> > Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
> > ---
> >  include/libcamera/internal/media_pipeline.h |  60 ++++
> >  include/libcamera/internal/meson.build      |   1 +
> >  src/libcamera/media_pipeline.cpp            | 301 ++++++++++++++++++++
> >  src/libcamera/meson.build                   |   1 +
> >  4 files changed, 363 insertions(+)
> >  create mode 100644 include/libcamera/internal/media_pipeline.h
> >  create mode 100644 src/libcamera/media_pipeline.cpp
> >
> > diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h
> > new file mode 100644
> > index 000000000000..be8a611e20ca
> > --- /dev/null
> > +++ b/include/libcamera/internal/media_pipeline.h
> > @@ -0,0 +1,60 @@
> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> > +/*
> > + * Copyright (C) 2024, Ideas on Board Oy
> > + *
> > + * Media pipeline handler
> > + */
> > +
> > +#pragma once
> > +
> > +#include <list>
> > +#include <string>
> 
> missing <vector>
> 

Added,

> > +
> > +#include <libcamera/base/log.h>
> 
> What for ?

Moved

> > +
> > +namespace libcamera {
> > +
> > +class CameraSensor;
> > +class MediaEntity;
> > +class MediaLink;
> > +class MediaPad;
> > +struct V4L2SubdeviceFormat;
> > +
> > +class MediaPipeline
> > +{
> > +public:
> > +     int init(MediaEntity *source, std::string sink);
> > +     int initLinks();
> > +     int configure(CameraSensor *sensor, V4L2SubdeviceFormat *);
> > +
> > +private:
> > +     struct Entity {
> > +             /* The media entity, always valid. */
> > +             MediaEntity *entity;
> > +             /*
> > +              * Whether or not the entity is a subdev that supports the
> > +              * routing API.
> > +              */
> > +             bool supportsRouting;
> > +             /*
> > +              * The local sink pad connected to the upstream entity, null for
> > +              * the camera sensor at the beginning of the pipeline.
> > +              */
> > +             const MediaPad *sink;
> > +             /*
> > +              * The local source pad connected to the downstream entity, null
> > +              * for the video node at the end of the pipeline.
> > +              */
> > +             const MediaPad *source;
> > +             /*
> > +              * The link on the source pad, to the downstream entity, null
> > +              * for the video node at the end of the pipeline.
> > +              */
> > +             MediaLink *sourceLink;
> > +     };
> > +
> > +     std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
> > +     std::list<Entity> entities_;
> > +};
> > +
> > +} /* namespace libcamera */
> > diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build
> > index 1c5eef9cab80..60a35d3e0357 100644
> > --- a/include/libcamera/internal/meson.build
> > +++ b/include/libcamera/internal/meson.build
> > @@ -30,6 +30,7 @@ libcamera_internal_headers = files([
> >      'mapped_framebuffer.h',
> >      'media_device.h',
> >      'media_object.h',
> > +    'media_pipeline.h',
> >      'pipeline_handler.h',
> >      'process.h',
> >      'pub_key.h',
> > diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp
> > new file mode 100644
> > index 000000000000..e14cc1ff4773
> > --- /dev/null
> > +++ b/src/libcamera/media_pipeline.cpp
> > @@ -0,0 +1,301 @@
> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> > +/*
> > + * Copyright (C) 2024, Ideas on Board Oy
> > + *
> > + * Media pipeline support
> > + */
> > +
> > +#include "libcamera/internal/media_pipeline.h"
> > +
> > +#include <algorithm>
> > +#include <errno.h>
> 
> missing <memory> for unique_ptr<>
> missing <tuple> for std::tuple and std::tie
> 
> > +#include <queue>
> > +#include <string>
> 
> Included in the header
> 
> > +#include <unordered_map>
> > +#include <unordered_set>
> > +#include <vector>
> 
> Will be included in the header
> 
> > +
> > +#include <linux/media.h>
> > +
> > +#include <libcamera/base/log.h>
> > +
> > +#include "libcamera/internal/camera_sensor.h"
> > +#include "libcamera/internal/media_device.h"
> > +#include "libcamera/internal/media_object.h"
> > +#include "libcamera/internal/v4l2_subdevice.h"
> > +
> > +/**
> > + * \file media_pipeline.h
> > + * \brief Provide a representation of a pipeline of devices using the Media
> > + * Controller.
> 
> no '.' at the end of briefs
> 
> > + */
> > +
> > +namespace libcamera {
> > +
> > +LOG_DEFINE_CATEGORY(MediaPipeline)
> > +
> > +/**
> > + * \class MediaPipeline
> > + * \brief The MediaPipeline represents a set of entities that together form a
> > + * data path for stream data.
> 
> no '.' at the end of briefs

Above all handled.

> 
> what a "stream data" ? Should it be "data stream" ?

Hrm ... that would be a data path for a data stream?

Perhaps it could be "form a path for a data stream"?

But neither of those sound better to me :-(

In my head - it's still the 'data-path' for 'stream-data'...

Would hyphens help ?


> > + *
> > + * A MediaPipeline instance is constructed from a sink and a source between
> > + * two entities in a media graph.
> 
> Doesn't a pipeline span the whole media graph rather than just
> between two entities ?

No - this can be constructed between any two (connected) points. It
doesn't have to be the full graph.

For instance, in the RKISP1 you'll see that the MediaPipeline is only
used for everything from the Sensor *up to* the CSI2 receiver. After
that - the RKISP1 pipeline handler manages the rest directly.

But this component handles all of the possible input paths even if we
were to add in 5 video mux devices in a long chain, or add 4 cameras to
a single CSI2 receiver through the Arducam 4 camera multiplexor for
instance.
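
To make that concrete, I'd expect a pipeline handler to use it roughly
like this (sketch only; 'media' is the MediaDevice and the entity names
are made up):

        MediaEntity *sensorEntity = media->getEntityByName("imx219 1-0010");

        MediaPipeline pipe;

        /* Resolve a path from the sensor up to the CSI-2 receiver only. */
        int ret = pipe.init(sensorEntity, "csi2-receiver");
        if (ret < 0)
                return ret;

        /* Enable every link along the resolved path. */
        ret = pipe.initLinks();
        if (ret < 0)
                return ret;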


> 
> > + */
> > +
> > +/**
> > + * \brief Retrieve all source pads connected to a sink pad through active routes
> 
> \param[in] sink
> 
> I wonder why I don't get a warning from doxygen..

Hrm ... is it not being built by doxy ?

Looks like I've missed params all the way through.


> > + *
> > + * Examine the entity using the V4L2 Subdevice Routing API to collect all the
> > + * source pads which are connected with an active route to the sink pad.
> > + *
> > + * \return A vector of source MediaPads
> > + */
> > +std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)
> > +{
> > +     MediaEntity *entity = sink->entity();
> > +     std::unique_ptr<V4L2Subdevice> subdev =
> > +             std::make_unique<V4L2Subdevice>(entity);
> > +
> > +     int ret = subdev->open();
> > +     if (ret < 0)
> > +             return {};
> > +
> > +     V4L2Subdevice::Routing routing = {};
> > +     ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);
> > +     if (ret < 0)
> > +             return {};
> > +
> > +     std::vector<const MediaPad *> pads;
> > +
> > +     for (const V4L2Subdevice::Route &route : routing) {
> > +             if (sink->index() != route.sink.pad ||
> > +                 !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))
> > +                     continue;
> > +
> > +             const MediaPad *pad = entity->getPadByIndex(route.source.pad);
> > +             if (!pad) {
> > +                     LOG(MediaPipeline, Warning)
> > +                             << "Entity " << entity->name()
> > +                             << " has invalid route source pad "
> > +                             << route.source.pad;
> > +             }
> > +
> > +             pads.push_back(pad);
> > +     }
> > +
> > +     return pads;
> > +}
> > +
> > +/**
> > + * \brief Find the path from source to sink
> 
> missing \param
> 
> > + *
> > + * Starting from a source entity, deteremine the shortest path to the
> 
> s/deteremine/determine
> 
> > + * target described by sink.
> 
> s/target described by// ?

'sink' is a string name, so it's only the description, not the
entity.

> 
> > + *
> > + * If sink can not be found, or a route from source to sink can not be achieved
> > + * an error of -ENOLINK will be returned.
> > + *
> > + * When successful, the MediaPipeline will internally store the representation
> > + * of entities and links to describe the path between the two entities.
> > + *
> > + * \return 0 on success, a negative errno otherwise
> > + */
> > +int MediaPipeline::init(MediaEntity *source, std::string sink)
> > +{
> > +     /*
> > +      * Find the shortest path between from the Camera Sensor and the
> > +      * target entity.
> 
> I know where this comment comes from, but this is not a CameraSensor,
> maybe just use "source" ?

Ack.

> 
> > +      */
> > +     std::unordered_set<MediaEntity *> visited;
> > +     std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;
> > +
> > +     /* Remember at each entity where we came from. */
> > +     std::unordered_map<MediaEntity *, Entity> parents;
> > +     MediaEntity *entity = nullptr;
> > +     MediaEntity *target = nullptr;
> > +     MediaPad *sinkPad;
> > +
> > +     queue.push({ source, nullptr });
> > +
> > +     while (!queue.empty()) {
> > +             std::tie(entity, sinkPad) = queue.front();
> > +             queue.pop();
> > +
> > +             /* Found the target device. */
> > +             if (entity->name() == sink) {
> > +                     LOG(MediaPipeline, Debug)
> > +                             << "Found Pipeline target " << entity->name();
> 
> s/Pipeline/pipeline
> 
> > +                     target = entity;
> > +                     break;
> > +             }
> > +
> > +             visited.insert(entity);
> > +
> > +             /*
> > +              * Add direct downstream entities to the search queue. If the
> > +              * current entity supports the subdev internal routing API,
> > +              * restrict the search to downstream entities reachable through
> > +              * active routes.
> > +              */
> > +
> > +             std::vector<const MediaPad *> pads;
> > +             bool supportsRouting = false;
> > +
> > +             if (sinkPad) {
> > +                     pads = routedSourcePads(sinkPad);
> > +                     if (!pads.empty())
> > +                             supportsRouting = true;
> > +             }
> > +
> > +             if (pads.empty()) {
> > +                     for (const MediaPad *pad : entity->pads()) {
> > +                             if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
> > +                                     continue;
> > +                             pads.push_back(pad);
> > +                     }
> > +             }
> > +
> > +             for (const MediaPad *pad : pads) {
> > +                     for (MediaLink *link : pad->links()) {
> > +                             MediaEntity *next = link->sink()->entity();
> > +                             if (visited.find(next) == visited.end()) {
> > +                                     queue.push({ next, link->sink() });
> > +
> > +                                     Entity e{ entity, supportsRouting, sinkPad, pad, link };
> > +                                     parents.insert({ next, e });
> > +                             }
> > +                     }
> > +             }
> > +     }
> > +
> > +     if (!target) {
> > +             LOG(MediaPipeline, Error) << "Failed to connect " << source->name();
> > +             return -ENOLINK;
> > +     }
> > +
> > +     /*
> > +      * With the parents, we can follow back our way from the capture device
> > +      * to the sensor. Store all the entities in the pipeline, from the
> > +      * camera sensor to the video node, in entities_.
> > +      */
> > +     entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });
> > +
> > +     for (auto it = parents.find(entity); it != parents.end();
> > +          it = parents.find(entity)) {
> > +             const Entity &e = it->second;
> > +             entities_.push_front(e);
> > +             entity = e.entity;
> > +     }
> 
> I'll skip review assuming this is a copy of the implementation in
> simple.cpp

Yes, I also plan/hope to replace the simple.cpp implementation by being
able to call the MediaPipeline helpers - but converting that is a bit
more involved than just adding support to the RKISP1 for the moment.

> > +
> > +     LOG(MediaPipeline, Info)
> > +             << "Found pipeline: "
> > +             << utils::join(entities_, " -> ",
> > +                            [](const Entity &e) {
> > +                                    std::string s = "[";
> > +                                    if (e.sink)
> > +                                            s += std::to_string(e.sink->index()) + "|";
> > +                                    s += e.entity->name();
> > +                                    if (e.source)
> > +                                            s += "|" + std::to_string(e.source->index());
> > +                                    s += "]";
> > +                                    return s;
> > +                            });
> > +
> > +     return 0;
> > +}
> > +
> > +/**
> > + * \brief Initialise and enable all links through the MediaPipeline
> > + * \return 0 on success, or a negative errno otherwise
> > + */
> > +int MediaPipeline::initLinks()
> > +{
> > +     int ret = 0;
> > +
> > +     MediaLink *sinkLink = nullptr;
> > +     for (Entity &e : entities_) {
> > +             /* Sensor entities have no connected sink. */
> > +             if (!sinkLink) {
> > +                     sinkLink = e.sourceLink;
> > +                     continue;
> > +             }
> > +
> > +             LOG(MediaPipeline, Debug) << "Enabling : " << *sinkLink;
> > +
> > +             if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
> > +                     ret = sinkLink->setEnabled(true);
> > +                     if (ret < 0)
> > +                             return ret;
> > +             }
> > +
> > +             sinkLink = e.sourceLink;
> > +     }
> > +
> > +     return ret;
> 
> I presume you can return 0 and declare ret inside the above loop

ack
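
i.e. roughly the following for the next version (untested sketch of the
same loop, just moving the declaration and returning 0):

	MediaLink *sinkLink = nullptr;

	for (Entity &e : entities_) {
		/* Sensor entities have no connected sink. */
		if (!sinkLink) {
			sinkLink = e.sourceLink;
			continue;
		}

		LOG(MediaPipeline, Debug) << "Enabling: " << *sinkLink;

		if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
			int ret = sinkLink->setEnabled(true);
			if (ret < 0)
				return ret;
		}

		sinkLink = e.sourceLink;
	}

	return 0;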

> 
> > +}
> > +
> > +/**
> > + * \brief Configure the entities of this MediaPipeline
> 
> missing \params
> 
> > + *
> > + * Propagate formats through each of the entities of the Pipeline, validating
> > + * that each one was not adjusted by the driver from the desired format.
> > + *
> > + * \return 0 on success or a negative errno otherwise
> > + */
> > +int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)

Hrm ... this does currently tie MediaPipeline to handling only from the
CameraSensor forwards.

In the future I could envisage it covering other partial sections of a
pipeline - but I think that can be dealt with if/when the need arises.
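
Purely as an illustration of that direction (not something in this
series), a later version could grow an overload that starts from an
arbitrary entity whose source format is already known, e.g.:

	/* Hypothetical future API - not implemented here. */
	int configure(MediaEntity *start, V4L2SubdeviceFormat *format);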


> > +{
> > +     int ret;
> > +
> > +     for (const Entity &e : entities_) {
> > +             /* The sensor is configured through the CameraSensor */
> > +             if (!e.sourceLink)
> > +                     break;
> > +
> > +             MediaLink *link = e.sourceLink;
> > +             MediaPad *source = link->source();
> > +             MediaPad *sink = link->sink();
> > +
> > +             /* 'format' already contains the sensor configuration */
> > +             if (source->entity() != sensor->entity()) {
> > +                     /* Todo: Add MediaDevice cache to reduce FD pressure */
> 
> s/Todo/\todo/
> 
> > +                     V4L2Subdevice subdev(source->entity());
> > +                     ret = subdev.open();
> 
> Trying to open a subdev multiple times return -EBUSY, if this function
> is meant to be used by pipeline handlers they should be careful in
> avoiding contentions.

Yes, at this scope I think it works - but indeed that's where the todo
above comes in. The simple pipeline handler has a device 'cache' of
sorts which would be helpful to bring in next.

I'd like to do that on top ...
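
For the record, the kind of cache I have in mind is roughly the
following - an untested sketch modelled loosely on what simple.cpp does
today; the subdevs_ member and subdev() helper names are made up:

	/* Cache of open subdevices, keyed by entity, owned by the pipeline. */
	std::map<const MediaEntity *, std::unique_ptr<V4L2Subdevice>> subdevs_;

	V4L2Subdevice *MediaPipeline::subdev(const MediaEntity *entity)
	{
		auto it = subdevs_.find(entity);
		if (it != subdevs_.end())
			return it->second.get();

		auto subdev = std::make_unique<V4L2Subdevice>(entity);
		if (subdev->open() < 0)
			return nullptr;

		return subdevs_.emplace(entity, std::move(subdev))
			.first->second.get();
	}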


> > +                     if (ret)
> > +                             return ret;
> > +
> > +                     ret = subdev.getFormat(source->index(), format);
> > +                     if (ret < 0)
> > +                             return ret;
> > +             }
> > +
> > +             V4L2SubdeviceFormat sourceFormat = *format;
> > +             /* Todo: Add MediaDevice cache to reduce FD pressure */
> 
> s/Todo/\todo/

Ack.

Thanks.


> 
> > +             V4L2Subdevice subdev(sink->entity());
> > +             ret = subdev.open();
> > +             if (ret)
> > +                     return ret;
> > +
> > +             ret = subdev.setFormat(sink->index(), format);
> > +             if (ret < 0)
> > +                     return ret;
> > +
> > +             if (format->code != sourceFormat.code ||
> > +                 format->size != sourceFormat.size) {
> > +                     LOG(MediaPipeline, Debug)
> > +                             << "Source '" << *source
> > +                             << "' produces " << sourceFormat
> > +                             << ", sink '" << *sink
> > +                             << "' requires " << *format;
> > +                     return -EINVAL;
> > +             }
> > +
> > +             LOG(MediaPipeline, Debug)
> > +                     << "Link " << *link << " configured with format "
> > +                     << *format;
> > +     }
> > +
> > +     return 0;
> > +}
> > +
> > +} /* namespace libcamera */
> > diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
> > index aa9ab0291854..2c0f8603b231 100644
> > --- a/src/libcamera/meson.build
> > +++ b/src/libcamera/meson.build
> > @@ -41,6 +41,7 @@ libcamera_internal_sources = files([
> >      'mapped_framebuffer.cpp',
> >      'media_device.cpp',
> >      'media_object.cpp',
> > +    'media_pipeline.cpp',
> >      'pipeline_handler.cpp',
> >      'process.cpp',
> >      'pub_key.cpp',
> > --
> > 2.46.0
> >

Patch
diff mbox series

diff --git a/include/libcamera/internal/media_pipeline.h b/include/libcamera/internal/media_pipeline.h
new file mode 100644
index 000000000000..be8a611e20ca
--- /dev/null
+++ b/include/libcamera/internal/media_pipeline.h
@@ -0,0 +1,60 @@ 
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2024, Ideas on Board Oy
+ *
+ * Media pipeline handler
+ */
+
+#pragma once
+
+#include <list>
+#include <string>
+
+#include <libcamera/base/log.h>
+
+namespace libcamera {
+
+class CameraSensor;
+class MediaEntity;
+class MediaLink;
+class MediaPad;
+struct V4L2SubdeviceFormat;
+
+class MediaPipeline
+{
+public:
+	int init(MediaEntity *source, std::string sink);
+	int initLinks();
+	int configure(CameraSensor *sensor, V4L2SubdeviceFormat *);
+
+private:
+	struct Entity {
+		/* The media entity, always valid. */
+		MediaEntity *entity;
+		/*
+		 * Whether or not the entity is a subdev that supports the
+		 * routing API.
+		 */
+		bool supportsRouting;
+		/*
+		 * The local sink pad connected to the upstream entity, null for
+		 * the camera sensor at the beginning of the pipeline.
+		 */
+		const MediaPad *sink;
+		/*
+		 * The local source pad connected to the downstream entity, null
+		 * for the video node at the end of the pipeline.
+		 */
+		const MediaPad *source;
+		/*
+		 * The link on the source pad, to the downstream entity, null
+		 * for the video node at the end of the pipeline.
+		 */
+		MediaLink *sourceLink;
+	};
+
+	std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
+	std::list<Entity> entities_;
+};
+
+} /* namespace libcamera */
diff --git a/include/libcamera/internal/meson.build b/include/libcamera/internal/meson.build
index 1c5eef9cab80..60a35d3e0357 100644
--- a/include/libcamera/internal/meson.build
+++ b/include/libcamera/internal/meson.build
@@ -30,6 +30,7 @@  libcamera_internal_headers = files([
     'mapped_framebuffer.h',
     'media_device.h',
     'media_object.h',
+    'media_pipeline.h',
     'pipeline_handler.h',
     'process.h',
     'pub_key.h',
diff --git a/src/libcamera/media_pipeline.cpp b/src/libcamera/media_pipeline.cpp
new file mode 100644
index 000000000000..e14cc1ff4773
--- /dev/null
+++ b/src/libcamera/media_pipeline.cpp
@@ -0,0 +1,301 @@ 
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2024, Ideas on Board Oy
+ *
+ * Media pipeline support
+ */
+
+#include "libcamera/internal/media_pipeline.h"
+
+#include <algorithm>
+#include <errno.h>
+#include <queue>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#include <linux/media.h>
+
+#include <libcamera/base/log.h>
+
+#include "libcamera/internal/camera_sensor.h"
+#include "libcamera/internal/media_device.h"
+#include "libcamera/internal/media_object.h"
+#include "libcamera/internal/v4l2_subdevice.h"
+
+/**
+ * \file media_pipeline.h
+ * \brief Provide a representation of a pipeline of devices using the Media
+ * Controller.
+ */
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(MediaPipeline)
+
+/**
+ * \class MediaPipeline
+ * \brief The MediaPipeline represents a set of entities that together form a
+ * data path for stream data.
+ *
+ * A MediaPipeline instance is built by finding a path from a source entity
+ * to a named sink entity in a media graph.
+ */
+
+/**
+ * \brief Retrieve all source pads connected to a sink pad through active routes
+ *
+ * Examine the entity using the V4L2 Subdevice Routing API to collect all the
+ * source pads which are connected with an active route to the sink pad.
+ *
+ * \return A vector of source MediaPads
+ */
+std::vector<const MediaPad *> MediaPipeline::routedSourcePads(MediaPad *sink)
+{
+	MediaEntity *entity = sink->entity();
+	std::unique_ptr<V4L2Subdevice> subdev =
+		std::make_unique<V4L2Subdevice>(entity);
+
+	int ret = subdev->open();
+	if (ret < 0)
+		return {};
+
+	V4L2Subdevice::Routing routing = {};
+	ret = subdev->getRouting(&routing, V4L2Subdevice::ActiveFormat);
+	if (ret < 0)
+		return {};
+
+	std::vector<const MediaPad *> pads;
+
+	for (const V4L2Subdevice::Route &route : routing) {
+		if (sink->index() != route.sink.pad ||
+		    !(route.flags & V4L2_SUBDEV_ROUTE_FL_ACTIVE))
+			continue;
+
+		const MediaPad *pad = entity->getPadByIndex(route.source.pad);
+		if (!pad) {
+			LOG(MediaPipeline, Warning)
+				<< "Entity " << entity->name()
+				<< " has invalid route source pad "
+				<< route.source.pad;
+		}
+
+		pads.push_back(pad);
+	}
+
+	return pads;
+}
+
+/**
+ * \brief Find the path from source to sink
+ *
+ * Starting from a source entity, determine the shortest path to the
+ * target described by sink.
+ *
+ * If sink can not be found, or a route from source to sink can not be
+ * established, an error of -ENOLINK will be returned.
+ *
+ * When successful, the MediaPipeline will internally store the representation
+ * of entities and links to describe the path between the two entities.
+ *
+ * \return 0 on success, a negative errno otherwise
+ */
+int MediaPipeline::init(MediaEntity *source, std::string sink)
+{
+	/*
+	 * Find the shortest path from the Camera Sensor to the
+	 * target entity.
+	 */
+	std::unordered_set<MediaEntity *> visited;
+	std::queue<std::tuple<MediaEntity *, MediaPad *>> queue;
+
+	/* Remember at each entity where we came from. */
+	std::unordered_map<MediaEntity *, Entity> parents;
+	MediaEntity *entity = nullptr;
+	MediaEntity *target = nullptr;
+	MediaPad *sinkPad;
+
+	queue.push({ source, nullptr });
+
+	while (!queue.empty()) {
+		std::tie(entity, sinkPad) = queue.front();
+		queue.pop();
+
+		/* Found the target device. */
+		if (entity->name() == sink) {
+			LOG(MediaPipeline, Debug)
+				<< "Found Pipeline target " << entity->name();
+			target = entity;
+			break;
+		}
+
+		visited.insert(entity);
+
+		/*
+		 * Add direct downstream entities to the search queue. If the
+		 * current entity supports the subdev internal routing API,
+		 * restrict the search to downstream entities reachable through
+		 * active routes.
+		 */
+
+		std::vector<const MediaPad *> pads;
+		bool supportsRouting = false;
+
+		if (sinkPad) {
+			pads = routedSourcePads(sinkPad);
+			if (!pads.empty())
+				supportsRouting = true;
+		}
+
+		if (pads.empty()) {
+			for (const MediaPad *pad : entity->pads()) {
+				if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
+					continue;
+				pads.push_back(pad);
+			}
+		}
+
+		for (const MediaPad *pad : pads) {
+			for (MediaLink *link : pad->links()) {
+				MediaEntity *next = link->sink()->entity();
+				if (visited.find(next) == visited.end()) {
+					queue.push({ next, link->sink() });
+
+					Entity e{ entity, supportsRouting, sinkPad, pad, link };
+					parents.insert({ next, e });
+				}
+			}
+		}
+	}
+
+	if (!target) {
+		LOG(MediaPipeline, Error) << "Failed to connect " << source->name();
+		return -ENOLINK;
+	}
+
+	/*
+	 * With the parents, we can follow back our way from the capture device
+	 * to the sensor. Store all the entities in the pipeline, from the
+	 * camera sensor to the video node, in entities_.
+	 */
+	entities_.push_front({ entity, false, sinkPad, nullptr, nullptr });
+
+	for (auto it = parents.find(entity); it != parents.end();
+	     it = parents.find(entity)) {
+		const Entity &e = it->second;
+		entities_.push_front(e);
+		entity = e.entity;
+	}
+
+	LOG(MediaPipeline, Info)
+		<< "Found pipeline: "
+		<< utils::join(entities_, " -> ",
+			       [](const Entity &e) {
+				       std::string s = "[";
+				       if (e.sink)
+					       s += std::to_string(e.sink->index()) + "|";
+				       s += e.entity->name();
+				       if (e.source)
+					       s += "|" + std::to_string(e.source->index());
+				       s += "]";
+				       return s;
+			       });
+
+	return 0;
+}
+
+/**
+ * \brief Initialise and enable all links through the MediaPipeline
+ * \return 0 on success, or a negative errno otherwise
+ */
+int MediaPipeline::initLinks()
+{
+	int ret = 0;
+
+	MediaLink *sinkLink = nullptr;
+	for (Entity &e : entities_) {
+		/* Sensor entities have no connected sink. */
+		if (!sinkLink) {
+			sinkLink = e.sourceLink;
+			continue;
+		}
+
+		LOG(MediaPipeline, Debug) << "Enabling : " << *sinkLink;
+
+		if (!(sinkLink->flags() & MEDIA_LNK_FL_ENABLED)) {
+			ret = sinkLink->setEnabled(true);
+			if (ret < 0)
+				return ret;
+		}
+
+		sinkLink = e.sourceLink;
+	}
+
+	return ret;
+}
+
+/**
+ * \brief Configure the entities of this MediaPipeline
+ *
+ * Propagate formats through each of the entities of the Pipeline, validating
+ * that each one was not adjusted by the driver from the desired format.
+ *
+ * \return 0 on success or a negative errno otherwise
+ */
+int MediaPipeline::configure(CameraSensor *sensor, V4L2SubdeviceFormat *format)
+{
+	int ret;
+
+	for (const Entity &e : entities_) {
+		/* The sensor is configured through the CameraSensor */
+		if (!e.sourceLink)
+			break;
+
+		MediaLink *link = e.sourceLink;
+		MediaPad *source = link->source();
+		MediaPad *sink = link->sink();
+
+		/* 'format' already contains the sensor configuration */
+		if (source->entity() != sensor->entity()) {
+			/* Todo: Add MediaDevice cache to reduce FD pressure */
+			V4L2Subdevice subdev(source->entity());
+			ret = subdev.open();
+			if (ret)
+				return ret;
+
+			ret = subdev.getFormat(source->index(), format);
+			if (ret < 0)
+				return ret;
+		}
+
+		V4L2SubdeviceFormat sourceFormat = *format;
+		/* Todo: Add MediaDevice cache to reduce FD pressure */
+		V4L2Subdevice subdev(sink->entity());
+		ret = subdev.open();
+		if (ret)
+			return ret;
+
+		ret = subdev.setFormat(sink->index(), format);
+		if (ret < 0)
+			return ret;
+
+		if (format->code != sourceFormat.code ||
+		    format->size != sourceFormat.size) {
+			LOG(MediaPipeline, Debug)
+				<< "Source '" << *source
+				<< " produces " << sourceFormat
+				<< ", sink '" << *sink
+				<< " requires " << format;
+			return -EINVAL;
+		}
+
+		LOG(MediaPipeline, Debug)
+			<< "Link " << *link << " configured with format "
+			<< *format;
+	}
+
+	return 0;
+}
+
+} /* namespace libcamera */
diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
index aa9ab0291854..2c0f8603b231 100644
--- a/src/libcamera/meson.build
+++ b/src/libcamera/meson.build
@@ -41,6 +41,7 @@  libcamera_internal_sources = files([
     'mapped_framebuffer.cpp',
     'media_device.cpp',
     'media_object.cpp',
+    'media_pipeline.cpp',
     'pipeline_handler.cpp',
     'process.cpp',
     'pub_key.cpp',