[RFC,v2,2/2] libcamera: pipeline: virtual: Move image generation to separate thread

Message ID 20250813102501.1645940-3-barnabas.pocze@ideasonboard.com
State New
Series
  • libcamera: pipeline: virtual: Move image generation to separate thread

Commit Message

Barnabás Pőcze Aug. 13, 2025, 10:25 a.m. UTC
Currently the virtual pipeline generates the images synchronously. This is not
ideal because it blocks the camera manager's internal thread, and because its
behaviour is different from other existing pipeline handlers, all of which
complete requests asynchronously.

So move the image generation to a separate thread by deriving `VirtualCameraData`
from `Thread` as well as `Object`, and using the existing asynchronous signal
and method call mechanism.

Signed-off-by: Barnabás Pőcze <barnabas.pocze@ideasonboard.com>
---
 src/libcamera/pipeline/virtual/virtual.cpp | 87 ++++++++++++++--------
 src/libcamera/pipeline/virtual/virtual.h   |  9 ++-
 2 files changed, 66 insertions(+), 30 deletions(-)

Comments

Laurent Pinchart Aug. 13, 2025, 11:34 a.m. UTC | #1
Hi Barnabás,

Thank you for the patch.

On Wed, Aug 13, 2025 at 12:25:01PM +0200, Barnabás Pőcze wrote:
> Currently the virtual pipeline generates the images synchronously. This is not
> ideal because it blocks the camera manager's internal thread, and because its
> behaviour is different from other existing pipeline handlers, all of which
> complete requests asynchronously.
> 
> So move the image generation to a separate thread by deriving `VirtualCameraData`
> from `Thread`, as well as `Object` and using the existing asynchronous signal
> and method call mechanism.
> 
> Signed-off-by: Barnabás Pőcze <barnabas.pocze@ideasonboard.com>
> ---
>  src/libcamera/pipeline/virtual/virtual.cpp | 87 ++++++++++++++--------
>  src/libcamera/pipeline/virtual/virtual.h   |  9 ++-
>  2 files changed, 66 insertions(+), 30 deletions(-)
> 
> diff --git a/src/libcamera/pipeline/virtual/virtual.cpp b/src/libcamera/pipeline/virtual/virtual.cpp
> index 049ebcba5..48857491d 100644
> --- a/src/libcamera/pipeline/virtual/virtual.cpp
> +++ b/src/libcamera/pipeline/virtual/virtual.cpp
> @@ -107,6 +107,14 @@ private:
>  
>  	bool initFrameGenerator(Camera *camera);
>  
> +	void onBufferCompleted(FrameBuffer *buffer)

We don't normally prefix signal handlers with "on".
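For example, simply

	void bufferCompleted(FrameBuffer *buffer)

or similar would match the rest of the code base better (just a sketch, the
exact name is up to you).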

> +	{
> +		Request *request = buffer->request();
> +
> +		if (completeBuffer(request, buffer))
> +			completeRequest(request);
> +	}

I wouldn't have made this an inline function, but no big deal.

> +
>  	DmaBufAllocator dmaBufAllocator_;
>  
>  	bool resetCreated_ = false;
> @@ -129,6 +137,38 @@ VirtualCameraData::VirtualCameraData(PipelineHandler *pipe,
>  
>  	/* \todo Support multiple streams and pass multi_stream_test */
>  	streamConfigs_.resize(kMaxStream);
> +
> +	moveToThread(this);
> +}
> +
> +void VirtualCameraData::queueRequest(Request *request)

This function doesn't queue a request, but processes it. How about
naming it processRequest() ?
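(That is, `void VirtualCameraData::processRequest(Request *request)` here, with
the declaration in virtual.h renamed to match.)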

> +{
> +	for (auto const &[stream, buffer] : request->buffers()) {
> +		bool found = false;
> +		/* map buffer and fill test patterns */
> +		for (auto &streamConfig : streamConfigs_) {
> +			if (stream == &streamConfig.stream) {
> +				FrameMetadata &fmd = buffer->_d()->metadata();
> +
> +				fmd.status = FrameMetadata::Status::FrameSuccess;
> +				fmd.sequence = streamConfig.seq++;
> +				fmd.timestamp = currentTimestamp();
> +
> +				for (const auto [i, p] : utils::enumerate(buffer->planes()))
> +					fmd.planes()[i].bytesused = p.length;
> +
> +				found = true;
> +
> +				if (streamConfig.frameGenerator->generateFrame(
> +					    stream->configuration().size, buffer))
> +					fmd.status = FrameMetadata::Status::FrameError;
> +
> +				bufferCompleted.emit(buffer);
> +				break;
> +			}
> +		}
> +		ASSERT(found);
> +	}
>  }
>  
>  VirtualCameraConfiguration::VirtualCameraConfiguration(VirtualCameraData *data)
> @@ -291,11 +331,27 @@ int PipelineHandlerVirtual::start([[maybe_unused]] Camera *camera,
>  	for (auto &s : data->streamConfigs_)
>  		s.seq = 0;
>  
> +	data->bufferCompleted.connect(this, &PipelineHandlerVirtual::onBufferCompleted);
> +	data->start();
> +
>  	return 0;
>  }
>  
> -void PipelineHandlerVirtual::stopDevice([[maybe_unused]] Camera *camera)
> +void PipelineHandlerVirtual::stopDevice(Camera *camera)
>  {
> +	VirtualCameraData *data = cameraData(camera);
> +
> +	/* Cancel pending work. */
> +	data->exit();
> +	data->wait();
> +	data->removeMessages(data);
> +
> +	/* Process pending `bufferCompleted` signals. */
> +	thread()->dispatchMessages(Message::Type::InvokeMessage, this);
> +	data->bufferCompleted.disconnect(this);
> +
> +	while (!data->queuedRequests_.empty())
> +		cancelRequest(data->queuedRequests_.front());
>  }
>  
>  int PipelineHandlerVirtual::queueRequestDevice([[maybe_unused]] Camera *camera,
> @@ -304,35 +360,8 @@ int PipelineHandlerVirtual::queueRequestDevice([[maybe_unused]] Camera *camera,
>  	VirtualCameraData *data = cameraData(camera);
>  	const auto timestamp = currentTimestamp();
>  
> -	for (auto const &[stream, buffer] : request->buffers()) {
> -		bool found = false;
> -		/* map buffer and fill test patterns */
> -		for (auto &streamConfig : data->streamConfigs_) {
> -			if (stream == &streamConfig.stream) {
> -				FrameMetadata &fmd = buffer->_d()->metadata();
> -
> -				fmd.status = FrameMetadata::Status::FrameSuccess;
> -				fmd.sequence = streamConfig.seq++;
> -				fmd.timestamp = timestamp;
> -
> -				for (const auto [i, p] : utils::enumerate(buffer->planes()))
> -					fmd.planes()[i].bytesused = p.length;
> -
> -				found = true;
> -
> -				if (streamConfig.frameGenerator->generateFrame(
> -					    stream->configuration().size, buffer))
> -					fmd.status = FrameMetadata::Status::FrameError;
> -
> -				completeBuffer(request, buffer);
> -				break;
> -			}
> -		}
> -		ASSERT(found);
> -	}
> -
>  	request->metadata().set(controls::SensorTimestamp, timestamp);
> -	completeRequest(request);
> +	data->invokeMethod(&VirtualCameraData::queueRequest, ConnectionTypeQueued, request);

Line wrap.
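I.e. wrap the call to stay within the usual line length, something like (sketch
only):

	data->invokeMethod(&VirtualCameraData::queueRequest,
			   ConnectionTypeQueued, request);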

>  
>  	return 0;
>  }
> diff --git a/src/libcamera/pipeline/virtual/virtual.h b/src/libcamera/pipeline/virtual/virtual.h
> index 683cb82b4..2d83dfe54 100644
> --- a/src/libcamera/pipeline/virtual/virtual.h
> +++ b/src/libcamera/pipeline/virtual/virtual.h
> @@ -11,6 +11,8 @@
>  #include <variant>
>  #include <vector>
>  
> +#include <libcamera/base/object.h>
> +#include <libcamera/base/thread.h>

Missing blank line.
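I.e. a blank line between the libcamera/base/ group and the other libcamera
headers (sketch):

	#include <libcamera/base/object.h>
	#include <libcamera/base/thread.h>

	#include <libcamera/geometry.h>
	#include <libcamera/stream.h>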

With those minor issues addressed,

Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>

>  #include <libcamera/geometry.h>
>  #include <libcamera/stream.h>
>  
> @@ -25,7 +27,9 @@ namespace libcamera {
>  
>  using VirtualFrame = std::variant<TestPattern, ImageFrames>;
>  
> -class VirtualCameraData : public Camera::Private
> +class VirtualCameraData : public Camera::Private,
> +			  public Thread,
> +			  public Object
>  {
>  public:
>  	const static unsigned int kMaxStream = 3;
> @@ -54,9 +58,12 @@ public:
>  
>  	~VirtualCameraData() = default;
>  
> +	void queueRequest(Request *request);
> +
>  	Configuration config_;
>  
>  	std::vector<StreamConfig> streamConfigs_;
> +	Signal<FrameBuffer *> bufferCompleted;
>  };
>  
>  } /* namespace libcamera */
Barnabás Pőcze Aug. 13, 2025, 1 p.m. UTC | #2
On 2025-08-13 13:34, Laurent Pinchart wrote:
> Hi Barnabás,
> 
> Thank you for the patch.
> 
> On Wed, Aug 13, 2025 at 12:25:01PM +0200, Barnabás Pőcze wrote:
>> Currently the virtual pipeline generates the images synchronously. This is not
>> ideal because it blocks the camera manager's internal thread, and because its
>> behaviour is different from other existing pipeline handlers, all of which
>> complete requests asynchronously.
>>
>> So move the image generation to a separate thread by deriving `VirtualCameraData`
>> from `Thread`, as well as `Object` and using the existing asynchronous signal
>> and method call mechanism.
>>
>> Signed-off-by: Barnabás Pőcze <barnabas.pocze@ideasonboard.com>
>> ---
>>   src/libcamera/pipeline/virtual/virtual.cpp | 87 ++++++++++++++--------
>>   src/libcamera/pipeline/virtual/virtual.h   |  9 ++-
>>   2 files changed, 66 insertions(+), 30 deletions(-)
>>
>> diff --git a/src/libcamera/pipeline/virtual/virtual.cpp b/src/libcamera/pipeline/virtual/virtual.cpp
>> index 049ebcba5..48857491d 100644
>> --- a/src/libcamera/pipeline/virtual/virtual.cpp
>> +++ b/src/libcamera/pipeline/virtual/virtual.cpp
>> @@ -107,6 +107,14 @@ private:
>>   
>>   	bool initFrameGenerator(Camera *camera);
>>   
>> +	void onBufferCompleted(FrameBuffer *buffer)
> 
> We don't normally prefix signal handlers with "on".

OK


> 
>> +	{
>> +		Request *request = buffer->request();
>> +
>> +		if (completeBuffer(request, buffer))
>> +			completeRequest(request);
>> +	}
> 
> I wouldn't have made this an inline function, but no big deal.

So defining it out of line? I fear we're going from a short lambda
to the longest possible version. :(
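
For reference, the fully out-of-line version would be roughly (sketch):

	void PipelineHandlerVirtual::onBufferCompleted(FrameBuffer *buffer)
	{
		Request *request = buffer->request();

		if (completeBuffer(request, buffer))
			completeRequest(request);
	}

plus the declaration in the class definition.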


> 
>> +
>>   	DmaBufAllocator dmaBufAllocator_;
>>   
>>   	bool resetCreated_ = false;
>> @@ -129,6 +137,38 @@ VirtualCameraData::VirtualCameraData(PipelineHandler *pipe,
>>   
>>   	/* \todo Support multiple streams and pass multi_stream_test */
>>   	streamConfigs_.resize(kMaxStream);
>> +
>> +	moveToThread(this);
>> +}
>> +
>> +void VirtualCameraData::queueRequest(Request *request)
> 
> This function doesn't queue a request, but processes it. How about
> naming it processRequest() ?

OK


> 
>> +{
>> +	for (auto const &[stream, buffer] : request->buffers()) {
>> +		bool found = false;
>> +		/* map buffer and fill test patterns */
>> +		for (auto &streamConfig : streamConfigs_) {
>> +			if (stream == &streamConfig.stream) {
>> +				FrameMetadata &fmd = buffer->_d()->metadata();
>> +
>> +				fmd.status = FrameMetadata::Status::FrameSuccess;
>> +				fmd.sequence = streamConfig.seq++;
>> +				fmd.timestamp = currentTimestamp();
>> +
>> +				for (const auto [i, p] : utils::enumerate(buffer->planes()))
>> +					fmd.planes()[i].bytesused = p.length;
>> +
>> +				found = true;
>> +
>> +				if (streamConfig.frameGenerator->generateFrame(
>> +					    stream->configuration().size, buffer))
>> +					fmd.status = FrameMetadata::Status::FrameError;
>> +
>> +				bufferCompleted.emit(buffer);
>> +				break;
>> +			}
>> +		}
>> +		ASSERT(found);
>> +	}
>>   }
>>   
>>   VirtualCameraConfiguration::VirtualCameraConfiguration(VirtualCameraData *data)
>> @@ -291,11 +331,27 @@ int PipelineHandlerVirtual::start([[maybe_unused]] Camera *camera,
>>   	for (auto &s : data->streamConfigs_)
>>   		s.seq = 0;
>>   
>> +	data->bufferCompleted.connect(this, &PipelineHandlerVirtual::onBufferCompleted);
>> +	data->start();
>> +
>>   	return 0;
>>   }
>>   
>> -void PipelineHandlerVirtual::stopDevice([[maybe_unused]] Camera *camera)
>> +void PipelineHandlerVirtual::stopDevice(Camera *camera)
>>   {
>> +	VirtualCameraData *data = cameraData(camera);
>> +
>> +	/* Cancel pending work. */
>> +	data->exit();
>> +	data->wait();
>> +	data->removeMessages(data);
>> +
>> +	/* Process pending `bufferCompleted` signals. */
>> +	thread()->dispatchMessages(Message::Type::InvokeMessage, this);
>> +	data->bufferCompleted.disconnect(this);
>> +
>> +	while (!data->queuedRequests_.empty())
>> +		cancelRequest(data->queuedRequests_.front());
>>   }
>>   
>>   int PipelineHandlerVirtual::queueRequestDevice([[maybe_unused]] Camera *camera,
>> @@ -304,35 +360,8 @@ int PipelineHandlerVirtual::queueRequestDevice([[maybe_unused]] Camera *camera,
>>   	VirtualCameraData *data = cameraData(camera);
>>   	const auto timestamp = currentTimestamp();
>>   
>> -	for (auto const &[stream, buffer] : request->buffers()) {
>> -		bool found = false;
>> -		/* map buffer and fill test patterns */
>> -		for (auto &streamConfig : data->streamConfigs_) {
>> -			if (stream == &streamConfig.stream) {
>> -				FrameMetadata &fmd = buffer->_d()->metadata();
>> -
>> -				fmd.status = FrameMetadata::Status::FrameSuccess;
>> -				fmd.sequence = streamConfig.seq++;
>> -				fmd.timestamp = timestamp;
>> -
>> -				for (const auto [i, p] : utils::enumerate(buffer->planes()))
>> -					fmd.planes()[i].bytesused = p.length;
>> -
>> -				found = true;
>> -
>> -				if (streamConfig.frameGenerator->generateFrame(
>> -					    stream->configuration().size, buffer))
>> -					fmd.status = FrameMetadata::Status::FrameError;
>> -
>> -				completeBuffer(request, buffer);
>> -				break;
>> -			}
>> -		}
>> -		ASSERT(found);
>> -	}
>> -
>>   	request->metadata().set(controls::SensorTimestamp, timestamp);
>> -	completeRequest(request);
>> +	data->invokeMethod(&VirtualCameraData::queueRequest, ConnectionTypeQueued, request);
> 
> Line wrap.

OK


> 
>>   
>>   	return 0;
>>   }
>> diff --git a/src/libcamera/pipeline/virtual/virtual.h b/src/libcamera/pipeline/virtual/virtual.h
>> index 683cb82b4..2d83dfe54 100644
>> --- a/src/libcamera/pipeline/virtual/virtual.h
>> +++ b/src/libcamera/pipeline/virtual/virtual.h
>> @@ -11,6 +11,8 @@
>>   #include <variant>
>>   #include <vector>
>>   
>> +#include <libcamera/base/object.h>
>> +#include <libcamera/base/thread.h>
> 
> Missing blank line.

Hmmm... checkstyle.py does not complain.


Regards,
Barnabás Pőcze

> 
> With those minor issues addressed,
> 
> Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
> 
>>   #include <libcamera/geometry.h>
>>   #include <libcamera/stream.h>
>>   
>> @@ -25,7 +27,9 @@ namespace libcamera {
>>   
>>   using VirtualFrame = std::variant<TestPattern, ImageFrames>;
>>   
>> -class VirtualCameraData : public Camera::Private
>> +class VirtualCameraData : public Camera::Private,
>> +			  public Thread,
>> +			  public Object
>>   {
>>   public:
>>   	const static unsigned int kMaxStream = 3;
>> @@ -54,9 +58,12 @@ public:
>>   
>>   	~VirtualCameraData() = default;
>>   
>> +	void queueRequest(Request *request);
>> +
>>   	Configuration config_;
>>   
>>   	std::vector<StreamConfig> streamConfigs_;
>> +	Signal<FrameBuffer *> bufferCompleted;
>>   };
>>   
>>   } /* namespace libcamera */
>
Laurent Pinchart Aug. 13, 2025, 1:07 p.m. UTC | #3
On Wed, Aug 13, 2025 at 03:00:21PM +0200, Barnabás Pőcze wrote:
> On 2025-08-13 13:34, Laurent Pinchart wrote:
> > On Wed, Aug 13, 2025 at 12:25:01PM +0200, Barnabás Pőcze wrote:
> >> Currently the virtual pipeline generates the images synchronously. This is not
> >> ideal because it blocks the camera manager's internal thread, and because its
> >> behaviour is different from other existing pipeline handlers, all of which
> >> complete requests asynchronously.
> >>
> >> So move the image generation to a separate thread by deriving `VirtualCameraData`
> >> from `Thread`, as well as `Object` and using the existing asynchronous signal
> >> and method call mechanism.
> >>
> >> Signed-off-by: Barnabás Pőcze <barnabas.pocze@ideasonboard.com>
> >> ---
> >>   src/libcamera/pipeline/virtual/virtual.cpp | 87 ++++++++++++++--------
> >>   src/libcamera/pipeline/virtual/virtual.h   |  9 ++-
> >>   2 files changed, 66 insertions(+), 30 deletions(-)
> >>
> >> diff --git a/src/libcamera/pipeline/virtual/virtual.cpp b/src/libcamera/pipeline/virtual/virtual.cpp
> >> index 049ebcba5..48857491d 100644
> >> --- a/src/libcamera/pipeline/virtual/virtual.cpp
> >> +++ b/src/libcamera/pipeline/virtual/virtual.cpp
> >> @@ -107,6 +107,14 @@ private:
> >>   
> >>   	bool initFrameGenerator(Camera *camera);
> >>   
> >> +	void onBufferCompleted(FrameBuffer *buffer)
> > 
> > We don't normally prefix signal handlers with "on".
> 
> OK
> 
> >> +	{
> >> +		Request *request = buffer->request();
> >> +
> >> +		if (completeBuffer(request, buffer))
> >> +			completeRequest(request);
> >> +	}
> > 
> > I wouldn't have made this an inline function, but no big deal.
> 
> So defining it out of line? I fear we're going from a short lambda
> to the longest possible version. :(

That's why I said it's no big deal :-) I can ignore it.

> >> +
> >>   	DmaBufAllocator dmaBufAllocator_;
> >>   
> >>   	bool resetCreated_ = false;
> >> @@ -129,6 +137,38 @@ VirtualCameraData::VirtualCameraData(PipelineHandler *pipe,
> >>   
> >>   	/* \todo Support multiple streams and pass multi_stream_test */
> >>   	streamConfigs_.resize(kMaxStream);
> >> +
> >> +	moveToThread(this);
> >> +}
> >> +
> >> +void VirtualCameraData::queueRequest(Request *request)
> > 
> > This function doesn't queue a request, but processes it. How about
> > naming it processRequest() ?
> 
> OK
> 
> >> +{
> >> +	for (auto const &[stream, buffer] : request->buffers()) {
> >> +		bool found = false;
> >> +		/* map buffer and fill test patterns */
> >> +		for (auto &streamConfig : streamConfigs_) {
> >> +			if (stream == &streamConfig.stream) {
> >> +				FrameMetadata &fmd = buffer->_d()->metadata();
> >> +
> >> +				fmd.status = FrameMetadata::Status::FrameSuccess;
> >> +				fmd.sequence = streamConfig.seq++;
> >> +				fmd.timestamp = currentTimestamp();
> >> +
> >> +				for (const auto [i, p] : utils::enumerate(buffer->planes()))
> >> +					fmd.planes()[i].bytesused = p.length;
> >> +
> >> +				found = true;
> >> +
> >> +				if (streamConfig.frameGenerator->generateFrame(
> >> +					    stream->configuration().size, buffer))
> >> +					fmd.status = FrameMetadata::Status::FrameError;
> >> +
> >> +				bufferCompleted.emit(buffer);
> >> +				break;
> >> +			}
> >> +		}
> >> +		ASSERT(found);
> >> +	}
> >>   }
> >>   
> >>   VirtualCameraConfiguration::VirtualCameraConfiguration(VirtualCameraData *data)
> >> @@ -291,11 +331,27 @@ int PipelineHandlerVirtual::start([[maybe_unused]] Camera *camera,
> >>   	for (auto &s : data->streamConfigs_)
> >>   		s.seq = 0;
> >>   
> >> +	data->bufferCompleted.connect(this, &PipelineHandlerVirtual::onBufferCompleted);
> >> +	data->start();
> >> +
> >>   	return 0;
> >>   }
> >>   
> >> -void PipelineHandlerVirtual::stopDevice([[maybe_unused]] Camera *camera)
> >> +void PipelineHandlerVirtual::stopDevice(Camera *camera)
> >>   {
> >> +	VirtualCameraData *data = cameraData(camera);
> >> +
> >> +	/* Cancel pending work. */
> >> +	data->exit();
> >> +	data->wait();
> >> +	data->removeMessages(data);
> >> +
> >> +	/* Process pending `bufferCompleted` signals. */
> >> +	thread()->dispatchMessages(Message::Type::InvokeMessage, this);
> >> +	data->bufferCompleted.disconnect(this);
> >> +
> >> +	while (!data->queuedRequests_.empty())
> >> +		cancelRequest(data->queuedRequests_.front());
> >>   }
> >>   
> >>   int PipelineHandlerVirtual::queueRequestDevice([[maybe_unused]] Camera *camera,
> >> @@ -304,35 +360,8 @@ int PipelineHandlerVirtual::queueRequestDevice([[maybe_unused]] Camera *camera,
> >>   	VirtualCameraData *data = cameraData(camera);
> >>   	const auto timestamp = currentTimestamp();
> >>   
> >> -	for (auto const &[stream, buffer] : request->buffers()) {
> >> -		bool found = false;
> >> -		/* map buffer and fill test patterns */
> >> -		for (auto &streamConfig : data->streamConfigs_) {
> >> -			if (stream == &streamConfig.stream) {
> >> -				FrameMetadata &fmd = buffer->_d()->metadata();
> >> -
> >> -				fmd.status = FrameMetadata::Status::FrameSuccess;
> >> -				fmd.sequence = streamConfig.seq++;
> >> -				fmd.timestamp = timestamp;
> >> -
> >> -				for (const auto [i, p] : utils::enumerate(buffer->planes()))
> >> -					fmd.planes()[i].bytesused = p.length;
> >> -
> >> -				found = true;
> >> -
> >> -				if (streamConfig.frameGenerator->generateFrame(
> >> -					    stream->configuration().size, buffer))
> >> -					fmd.status = FrameMetadata::Status::FrameError;
> >> -
> >> -				completeBuffer(request, buffer);
> >> -				break;
> >> -			}
> >> -		}
> >> -		ASSERT(found);
> >> -	}
> >> -
> >>   	request->metadata().set(controls::SensorTimestamp, timestamp);
> >> -	completeRequest(request);
> >> +	data->invokeMethod(&VirtualCameraData::queueRequest, ConnectionTypeQueued, request);
> > 
> > Line wrap.
> 
> OK
> 
> >>   
> >>   	return 0;
> >>   }
> >> diff --git a/src/libcamera/pipeline/virtual/virtual.h b/src/libcamera/pipeline/virtual/virtual.h
> >> index 683cb82b4..2d83dfe54 100644
> >> --- a/src/libcamera/pipeline/virtual/virtual.h
> >> +++ b/src/libcamera/pipeline/virtual/virtual.h
> >> @@ -11,6 +11,8 @@
> >>   #include <variant>
> >>   #include <vector>
> >>   
> >> +#include <libcamera/base/object.h>
> >> +#include <libcamera/base/thread.h>
> > 
> > Missing blank line.
> 
> Hmmm... checkstyle.py does not complain.

I would have expected it to :-/

> > With those minor issues addressed,
> > 
> > Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
> > 
> >>   #include <libcamera/geometry.h>
> >>   #include <libcamera/stream.h>
> >>   
> >> @@ -25,7 +27,9 @@ namespace libcamera {
> >>   
> >>   using VirtualFrame = std::variant<TestPattern, ImageFrames>;
> >>   
> >> -class VirtualCameraData : public Camera::Private
> >> +class VirtualCameraData : public Camera::Private,
> >> +			  public Thread,
> >> +			  public Object
> >>   {
> >>   public:
> >>   	const static unsigned int kMaxStream = 3;
> >> @@ -54,9 +58,12 @@ public:
> >>   
> >>   	~VirtualCameraData() = default;
> >>   
> >> +	void queueRequest(Request *request);
> >> +
> >>   	Configuration config_;
> >>   
> >>   	std::vector<StreamConfig> streamConfigs_;
> >> +	Signal<FrameBuffer *> bufferCompleted;
> >>   };
> >>   
> >>   } /* namespace libcamera */

Patch

diff --git a/src/libcamera/pipeline/virtual/virtual.cpp b/src/libcamera/pipeline/virtual/virtual.cpp
index 049ebcba5..48857491d 100644
--- a/src/libcamera/pipeline/virtual/virtual.cpp
+++ b/src/libcamera/pipeline/virtual/virtual.cpp
@@ -107,6 +107,14 @@  private:
 
 	bool initFrameGenerator(Camera *camera);
 
+	void onBufferCompleted(FrameBuffer *buffer)
+	{
+		Request *request = buffer->request();
+
+		if (completeBuffer(request, buffer))
+			completeRequest(request);
+	}
+
 	DmaBufAllocator dmaBufAllocator_;
 
 	bool resetCreated_ = false;
@@ -129,6 +137,38 @@  VirtualCameraData::VirtualCameraData(PipelineHandler *pipe,
 
 	/* \todo Support multiple streams and pass multi_stream_test */
 	streamConfigs_.resize(kMaxStream);
+
+	moveToThread(this);
+}
+
+void VirtualCameraData::queueRequest(Request *request)
+{
+	for (auto const &[stream, buffer] : request->buffers()) {
+		bool found = false;
+		/* map buffer and fill test patterns */
+		for (auto &streamConfig : streamConfigs_) {
+			if (stream == &streamConfig.stream) {
+				FrameMetadata &fmd = buffer->_d()->metadata();
+
+				fmd.status = FrameMetadata::Status::FrameSuccess;
+				fmd.sequence = streamConfig.seq++;
+				fmd.timestamp = currentTimestamp();
+
+				for (const auto [i, p] : utils::enumerate(buffer->planes()))
+					fmd.planes()[i].bytesused = p.length;
+
+				found = true;
+
+				if (streamConfig.frameGenerator->generateFrame(
+					    stream->configuration().size, buffer))
+					fmd.status = FrameMetadata::Status::FrameError;
+
+				bufferCompleted.emit(buffer);
+				break;
+			}
+		}
+		ASSERT(found);
+	}
 }
 
 VirtualCameraConfiguration::VirtualCameraConfiguration(VirtualCameraData *data)
@@ -291,11 +331,27 @@  int PipelineHandlerVirtual::start([[maybe_unused]] Camera *camera,
 	for (auto &s : data->streamConfigs_)
 		s.seq = 0;
 
+	data->bufferCompleted.connect(this, &PipelineHandlerVirtual::onBufferCompleted);
+	data->start();
+
 	return 0;
 }
 
-void PipelineHandlerVirtual::stopDevice([[maybe_unused]] Camera *camera)
+void PipelineHandlerVirtual::stopDevice(Camera *camera)
 {
+	VirtualCameraData *data = cameraData(camera);
+
+	/* Cancel pending work. */
+	data->exit();
+	data->wait();
+	data->removeMessages(data);
+
+	/* Process pending `bufferCompleted` signals. */
+	thread()->dispatchMessages(Message::Type::InvokeMessage, this);
+	data->bufferCompleted.disconnect(this);
+
+	while (!data->queuedRequests_.empty())
+		cancelRequest(data->queuedRequests_.front());
 }
 
 int PipelineHandlerVirtual::queueRequestDevice([[maybe_unused]] Camera *camera,
@@ -304,35 +360,8 @@  int PipelineHandlerVirtual::queueRequestDevice([[maybe_unused]] Camera *camera,
 	VirtualCameraData *data = cameraData(camera);
 	const auto timestamp = currentTimestamp();
 
-	for (auto const &[stream, buffer] : request->buffers()) {
-		bool found = false;
-		/* map buffer and fill test patterns */
-		for (auto &streamConfig : data->streamConfigs_) {
-			if (stream == &streamConfig.stream) {
-				FrameMetadata &fmd = buffer->_d()->metadata();
-
-				fmd.status = FrameMetadata::Status::FrameSuccess;
-				fmd.sequence = streamConfig.seq++;
-				fmd.timestamp = timestamp;
-
-				for (const auto [i, p] : utils::enumerate(buffer->planes()))
-					fmd.planes()[i].bytesused = p.length;
-
-				found = true;
-
-				if (streamConfig.frameGenerator->generateFrame(
-					    stream->configuration().size, buffer))
-					fmd.status = FrameMetadata::Status::FrameError;
-
-				completeBuffer(request, buffer);
-				break;
-			}
-		}
-		ASSERT(found);
-	}
-
 	request->metadata().set(controls::SensorTimestamp, timestamp);
-	completeRequest(request);
+	data->invokeMethod(&VirtualCameraData::queueRequest, ConnectionTypeQueued, request);
 
 	return 0;
 }
diff --git a/src/libcamera/pipeline/virtual/virtual.h b/src/libcamera/pipeline/virtual/virtual.h
index 683cb82b4..2d83dfe54 100644
--- a/src/libcamera/pipeline/virtual/virtual.h
+++ b/src/libcamera/pipeline/virtual/virtual.h
@@ -11,6 +11,8 @@ 
 #include <variant>
 #include <vector>
 
+#include <libcamera/base/object.h>
+#include <libcamera/base/thread.h>
 #include <libcamera/geometry.h>
 #include <libcamera/stream.h>
 
@@ -25,7 +27,9 @@  namespace libcamera {
 
 using VirtualFrame = std::variant<TestPattern, ImageFrames>;
 
-class VirtualCameraData : public Camera::Private
+class VirtualCameraData : public Camera::Private,
+			  public Thread,
+			  public Object
 {
 public:
 	const static unsigned int kMaxStream = 3;
@@ -54,9 +58,12 @@  public:
 
 	~VirtualCameraData() = default;
 
+	void queueRequest(Request *request);
+
 	Configuration config_;
 
 	std::vector<StreamConfig> streamConfigs_;
+	Signal<FrameBuffer *> bufferCompleted;
 };
 
 } /* namespace libcamera */