@@ -1173,6 +1173,21 @@ void CameraDevice::requestComplete(Request *request)
return;
}
+ FrameBuffer *source = src;
+ if (cameraStream->type() != CameraStream::Type::Internal) {
+ /*
+ * The source buffer is owned by Request object which
+ * can be reused for future capture request. Since
+ * post-processor will run asynchronously, we need to
+ * copy the source buffer and use the copy as the source.
+ */
+ for (const auto &plane : src->planes())
+ descriptor.srcPlanes_.push_back(plane);
+ descriptor.srcFramebuffer_ =
+ std::make_unique<FrameBuffer>(descriptor.srcPlanes_);
+ source = descriptor.srcFramebuffer_.get();
+ }
+
std::unique_ptr<Camera3RequestDescriptor> reqDescriptor =
std::make_unique<Camera3RequestDescriptor>();
*reqDescriptor = std::move(descriptor);
@@ -1186,7 +1201,7 @@ void CameraDevice::requestComplete(Request *request)
metadata);
});
- int ret = cameraStream->process(src, currentDescriptor->destBuffer_.get(),
+ int ret = cameraStream->process(source, currentDescriptor->destBuffer_.get(),
currentDescriptor->settings_,
metadata);
return;
@@ -1260,6 +1275,7 @@ void CameraDevice::streamProcessingComplete(CameraStream *cameraStream,
void CameraDevice::sendQueuedCaptureResults()
{
+ MutexLocker lock(queuedDescriptorsMutex_);
while (!queuedDescriptor_.empty()) {
std::unique_ptr<Camera3RequestDescriptor> &d = queuedDescriptor_.front();
if (d->status_ != Camera3RequestDescriptor::NOT_FINISHED) {
@@ -100,6 +100,9 @@ private:
camera3_capture_result_t captureResult_;
libcamera::FrameBuffer *internalBuffer_;
completionStatus status_;
+
+ std::unique_ptr<libcamera::FrameBuffer> srcFramebuffer_;
+ std::vector<libcamera::FrameBuffer::Plane> srcPlanes_;
};
enum class State {
@@ -147,6 +150,7 @@ private:
libcamera::Mutex descriptorsMutex_; /* Protects descriptors_. */
std::map<uint64_t, Camera3RequestDescriptor> descriptors_;
+ libcamera::Mutex queuedDescriptorsMutex_; /* Protects queuedDescriptor_. */
std::deque<std::unique_ptr<Camera3RequestDescriptor>> queuedDescriptor_;
std::string maker_;
@@ -55,6 +55,15 @@ CameraStream::CameraStream(CameraDevice *const cameraDevice,
* is what we instantiate here.
*/
postProcessor_ = std::make_unique<PostProcessorJpeg>(cameraDevice_);
+ ppWorker_ = std::make_unique<PostProcessorWorker>(postProcessor_.get());
+
+ thread_ = std::make_unique<libcamera::Thread>();
+ ppWorker_->moveToThread(thread_.get());
+ /*
+ * \todo Class is MoveConstructible; where should the thread
+ * be stopped if there is no user-defined destructor?
+ */
+ thread_->start();
}
if (type == Type::Internal) {
@@ -108,7 +117,11 @@ int CameraStream::process(const FrameBuffer *source,
if (!postProcessor_)
return 0;
- return postProcessor_->process(source, destBuffer, requestMetadata, resultMetadata);
+ ppWorker_->invokeMethod(&PostProcessorWorker::process,
+ ConnectionTypeQueued, source, destBuffer,
+ requestMetadata, resultMetadata);
+
+ return 0;
}
void CameraStream::handlePostProcessing(PostProcessor::Status status)
@@ -13,7 +13,9 @@
#include <hardware/camera3.h>
+#include <libcamera/base/object.h>
#include <libcamera/base/signal.h>
+#include <libcamera/base/thread.h>
#include <libcamera/camera.h>
#include <libcamera/framebuffer.h>
@@ -23,6 +25,7 @@
#include "post_processor.h"
+class CameraBuffer;
class CameraDevice;
class CameraMetadata;
@@ -135,6 +138,27 @@ public:
libcamera::Signal<ProcessStatus> processComplete;
private:
+ class PostProcessorWorker : public libcamera::Object
+ {
+ public:
+ PostProcessorWorker(PostProcessor *postProcessor)
+ {
+ postProcessor_ = postProcessor;
+ }
+
+ void process(const libcamera::FrameBuffer *source,
+ CameraBuffer *destination,
+ const CameraMetadata &requestMetadata,
+ CameraMetadata *resultMetadata)
+ {
+ postProcessor_->process(source, destination,
+ requestMetadata, resultMetadata);
+ }
+
+ private:
+ PostProcessor *postProcessor_;
+ };
+
void handlePostProcessing(PostProcessor::Status status);
CameraDevice *const cameraDevice_;
@@ -151,6 +175,8 @@ private:
*/
std::unique_ptr<std::mutex> mutex_;
std::unique_ptr<PostProcessor> postProcessor_;
+ std::unique_ptr<PostProcessorWorker> ppWorker_;
+ std::unique_ptr<libcamera::Thread> thread_;
};
#endif /* __ANDROID_CAMERA_STREAM__ */
In CameraStream, introduce a new private PostProcessorWorker class derived from Object. An instance of PostProcessorWorker is moved to a separate thread instance which will be responsible for running the post-processor. Running the PostProcessor asynchronously entails that all the data context needed by the PostProcessor should remain valid for the entire duration of its run. Most of the context-preserving work has been addressed in the previous patch; we just need to ensure the source framebuffer data that comes via Camera::Request remains valid for the entire duration of the post-processing running asynchronously. In order to do so, we maintain a separate copy of the framebuffer data and add it to the Camera3RequestDescriptor in which we preserve the rest of the context. Signed-off-by: Umang Jain <umang.jain@ideasonboard.com> --- src/android/camera_device.cpp | 18 +++++++++++++++++- src/android/camera_device.h | 4 ++++ src/android/camera_stream.cpp | 15 ++++++++++++++- src/android/camera_stream.h | 26 ++++++++++++++++++++++++++ 4 files changed, 61 insertions(+), 2 deletions(-)