@@ -591,6 +591,39 @@ gst_task_resume(GstTask *task)
}
#endif
+#if !GST_CHECK_VERSION(1, 22, 0)
+/*
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Library <2002> Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) <2007> David A. Schleef <ds@schleef.org>
+ */
+/* This function has been imported directly from the GStreamer project to
+ * support GStreamer versions older than 1.22.0 and should be removed once
+ * those versions are no longer supported. */
+gint gst_video_format_info_extrapolate_stride(const GstVideoFormatInfo *finfo, gint plane, gint stride)
+{
+ gint estride;
+ gint comp[GST_VIDEO_MAX_COMPONENTS];
+ gint i;
+
+ /* there is nothing to extrapolate on first plane */
+ if (plane == 0)
+ return stride;
+
+ gst_video_format_info_component(finfo, plane, comp);
+
+ /* For now, all planar formats have a single component on first plane, but
+ * if there was a planar format with more, we'd have to make a ratio of the
+ * number of component on the first plane against the number of component on
+ * the current plane. */
+ estride = 0;
+ for (i = 0; i < GST_VIDEO_MAX_COMPONENTS && comp[i] >= 0; i++)
+ estride += GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(finfo, comp[i], stride);
+
+ return estride;
+}
+#endif
+
G_LOCK_DEFINE_STATIC(cm_singleton_lock);
static std::weak_ptr<CameraManager> cm_singleton_ptr;
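
For reference, the backported helper sums the plane's component widths, each scaled from the first-plane stride, so padding applied by the camera on plane 0 propagates to the other planes. A minimal sketch of what it yields, using the helper above (or GStreamer's own implementation from 1.22 onwards); the function name and the 2048-byte stride for a 1920-wide NV12 frame are illustrative assumptions, not part of the patch:

#include <gst/video/video.h>

/* Illustration only: per-plane strides extrapolated from the plane-0 stride
 * reported by the camera (here 2048 bytes for a 1920-wide NV12 frame). */
static void
extrapolate_stride_example(void)
{
	const GstVideoFormatInfo *finfo =
		gst_video_format_get_info(GST_VIDEO_FORMAT_NV12);

	gint stride0 = gst_video_format_info_extrapolate_stride(finfo, 0, 2048);
	gint stride1 = gst_video_format_info_extrapolate_stride(finfo, 1, 2048);

	/* stride0 == 2048: plane 0 is always returned unchanged.
	 * stride1 == 2048: the NV12 chroma plane interleaves U and V, so the
	 * two half-width components add up to the full stride; an I420 chroma
	 * plane would yield 1024 instead. */
	g_print("strides: %d %d\n", stride0, stride1);
}
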
@@ -35,6 +35,10 @@ static inline void gst_clear_event(GstEvent **event_ptr)
#if !GST_CHECK_VERSION(1, 17, 1)
gboolean gst_task_resume(GstTask *task);
#endif
+
+#if !GST_CHECK_VERSION(1, 22, 0)
+gint gst_video_format_info_extrapolate_stride(const GstVideoFormatInfo *finfo, gint plane, gint stride);
+#endif
std::shared_ptr<libcamera::CameraManager> gst_libcamera_get_camera_manager(int &ret);
/**
@@ -135,7 +135,8 @@ gst_libcamera_pool_class_init(GstLibcameraPoolClass *klass)
}
GstLibcameraPool *
-gst_libcamera_pool_new(GstLibcameraAllocator *allocator, Stream *stream)
+gst_libcamera_pool_new(GstLibcameraAllocator *allocator, Stream *stream,
+ GstVideoInfo *info, gboolean add_video_meta)
{
auto *pool = GST_LIBCAMERA_POOL(g_object_new(GST_TYPE_LIBCAMERA_POOL, nullptr));
@@ -145,6 +146,12 @@ gst_libcamera_pool_new(GstLibcameraAllocator *allocator, Stream *stream)
gsize pool_size = gst_libcamera_allocator_get_pool_size(allocator, stream);
for (gsize i = 0; i < pool_size; i++) {
GstBuffer *buffer = gst_buffer_new();
+ if (add_video_meta) {
+ gst_buffer_add_video_meta_full(buffer, GST_VIDEO_FRAME_FLAG_NONE,
+ GST_VIDEO_INFO_FORMAT(info), GST_VIDEO_INFO_WIDTH(info),
+ GST_VIDEO_INFO_HEIGHT(info), GST_VIDEO_INFO_N_PLANES(info),
+ info->offset, info->stride);
+ }
pool->queue->push_back(buffer);
}
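
The video meta attached above is what lets downstream elements handle the camera's stride and offsets without a copy, since gst_video_frame_map() picks the meta up automatically when it is present. A rough sketch of the consumer side; dump_plane0_stride() is an illustrative helper, not part of the patch:

#include <gst/video/video.h>

/* Sketch only: map a buffer and read the plane 0 stride. When the buffer
 * carries a GstVideoMeta, the reported stride is the camera stride rather
 * than the default one computed from the caps. */
static gboolean
dump_plane0_stride(GstBuffer *buffer, GstVideoInfo *info)
{
	GstVideoFrame frame;

	if (!gst_video_frame_map(&frame, info, buffer, GST_MAP_READ))
		return FALSE;

	g_print("plane 0 stride: %d\n", GST_VIDEO_FRAME_PLANE_STRIDE(&frame, 0));

	gst_video_frame_unmap(&frame);
	return TRUE;
}
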
@@ -14,6 +14,7 @@
#include "gstlibcameraallocator.h"
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <libcamera/stream.h>
@@ -21,7 +22,7 @@
G_DECLARE_FINAL_TYPE(GstLibcameraPool, gst_libcamera_pool, GST_LIBCAMERA, POOL, GstBufferPool)
GstLibcameraPool *gst_libcamera_pool_new(GstLibcameraAllocator *allocator,
- libcamera::Stream *stream);
+ libcamera::Stream *stream, GstVideoInfo *info, gboolean add_video_meta);
libcamera::Stream *gst_libcamera_pool_get_stream(GstLibcameraPool *self);
@@ -150,6 +150,10 @@ struct _GstLibcameraSrc {
GstLibcameraSrcState *state;
GstLibcameraAllocator *allocator;
GstFlowCombiner *flow_combiner;
+
+ gboolean frame_copy;
+ GstVideoInfo info;
+ GstBufferPool *pool;
};
enum {
@@ -268,6 +272,43 @@ GstLibcameraSrcState::requestCompleted(Request *request)
gst_task_resume(src_->task);
}
+static GstFlowReturn
+gst_libcamera_video_frame_copy(GstLibcameraSrc *self, GstBuffer *src, GstBuffer *dest, guint32 stride)
+{
+ GstVideoInfo src_info = self->info;
+ GstVideoFrame src_frame, dest_frame;
+ gsize offset = 0;
+
+	for (guint i = 0; i < GST_VIDEO_INFO_N_PLANES(&src_info); i++) {
+		/* Extrapolate each plane's stride from the camera stride of plane 0. */
+		guint32 estride = gst_video_format_info_extrapolate_stride(src_info.finfo, i, stride);
+
+		src_info.stride[i] = estride;
+		src_info.offset[i] = offset;
+		offset += estride * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(src_info.finfo, i,
+									GST_VIDEO_INFO_HEIGHT(&src_info));
+	}
+ src_info.size = gst_buffer_get_size(src);
+
+ if (!gst_video_frame_map(&src_frame, &src_info, src, GST_MAP_READ))
+ goto invalid_buffer;
+
+ if (!gst_video_frame_map(&dest_frame, &self->info, dest, GST_MAP_WRITE)) {
+ gst_video_frame_unmap(&src_frame);
+ goto invalid_buffer;
+ }
+
+ gst_video_frame_copy(&dest_frame, &src_frame);
+
+ gst_video_frame_unmap(&src_frame);
+ gst_video_frame_unmap(&dest_frame);
+
+ return GST_FLOW_OK;
+
+invalid_buffer: {
+ GST_ERROR_OBJECT(self, "Could not map buffer");
+ return GST_FLOW_ERROR;
+}
+}
+
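
For clarity, the gst_video_frame_copy() call above walks each plane line by line using each frame's own stride, which is how a source frame laid out with the camera stride ends up in a destination buffer using the downstream layout. A rough single-plane equivalent follows; copy_plane_sketch() is an illustration with the simplifications noted in its comments, not the actual GStreamer implementation:

#include <string.h>
#include <gst/video/video.h>

/* Illustration only: roughly what gst_video_frame_copy() does per plane.
 * Simplifications: the real helper copies only the valid width rather than
 * MIN() of the strides, and the plane index is used as component index,
 * which holds for common planar formats such as NV12 and I420. */
static void
copy_plane_sketch(GstVideoFrame *dest, const GstVideoFrame *src, guint plane)
{
	guint8 *d = GST_VIDEO_FRAME_PLANE_DATA(dest, plane);
	const guint8 *s = GST_VIDEO_FRAME_PLANE_DATA(src, plane);
	gint dstride = GST_VIDEO_FRAME_PLANE_STRIDE(dest, plane);
	gint sstride = GST_VIDEO_FRAME_PLANE_STRIDE(src, plane);
	gint height = GST_VIDEO_FRAME_COMP_HEIGHT(dest, plane);

	for (gint h = 0; h < height; h++)
		memcpy(d + h * (gsize)dstride, s + h * (gsize)sstride,
		       MIN(dstride, sstride));
}
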
/* Must be called with stream_lock held. */
int GstLibcameraSrcState::processRequest()
{
@@ -292,12 +333,34 @@ int GstLibcameraSrcState::processRequest()
GstFlowReturn ret = GST_FLOW_OK;
gst_flow_combiner_reset(src_->flow_combiner);
- for (GstPad *srcpad : srcpads_) {
+	for (gsize i = 0; i < srcpads_.size(); i++) {
+		GstPad *srcpad = srcpads_[i];
Stream *stream = gst_libcamera_pad_get_stream(srcpad);
GstBuffer *buffer = wrap->detachBuffer(stream);
+ GstBuffer *tmp = NULL;
+		const StreamConfiguration &stream_cfg = config_->at(i);
FrameBuffer *fb = gst_libcamera_buffer_get_frame_buffer(buffer);
+ if (src_->frame_copy) {
+ ret = gst_buffer_pool_acquire_buffer(src_->pool, &tmp, NULL);
+ if (ret != GST_FLOW_OK) {
+ GST_ERROR("Failed to acquire buffer");
+ gst_buffer_unref(buffer);
+ return -EPIPE;
+ }
+
+ ret = gst_libcamera_video_frame_copy(src_, buffer, tmp, stream_cfg.stride);
+ gst_buffer_unref(buffer);
+ if (ret != GST_FLOW_OK) {
+ GST_ERROR("Failed to copy buffer");
+ gst_buffer_unref(tmp);
+ return -EPIPE;
+ }
+
+ buffer = tmp;
+ }
+
if (GST_CLOCK_TIME_IS_VALID(wrap->pts_)) {
GST_BUFFER_PTS(buffer) = wrap->pts_;
gst_libcamera_pad_set_latency(srcpad, wrap->latency_);
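
When debugging which path a buffer took, a pad probe on the libcamerasrc source pad can report whether a GstVideoMeta was attached (stride and offsets taken from the camera) or the buffer uses the default layout, which includes buffers that went through the copy path above. A sketch, not part of the patch; meta_probe() is an illustrative name:

#include <gst/gst.h>
#include <gst/video/video.h>

/* Debugging sketch only: log the layout information carried by each buffer. */
static GstPadProbeReturn
meta_probe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
	GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER(info);
	GstVideoMeta *meta = gst_buffer_get_video_meta(buffer);

	if (meta)
		GST_INFO_OBJECT(pad, "VideoMeta attached, plane 0 stride %d",
				meta->stride[0]);
	else
		GST_INFO_OBJECT(pad, "no VideoMeta, default layout");

	return GST_PAD_PROBE_OK;
}

It would be attached with gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_BUFFER, meta_probe, NULL, NULL).
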
@@ -497,9 +560,74 @@ gst_libcamera_src_negotiate(GstLibcameraSrc *self)
for (gsize i = 0; i < state->srcpads_.size(); i++) {
GstPad *srcpad = state->srcpads_[i];
const StreamConfiguration &stream_cfg = state->config_->at(i);
+ gboolean add_video_meta = false;
+ GstVideoInfo info;
+
+ g_autoptr(GstCaps) caps = gst_libcamera_stream_configuration_to_caps(stream_cfg);
+ gst_libcamera_framerate_to_caps(caps, element_caps);
- GstLibcameraPool *pool = gst_libcamera_pool_new(self->allocator,
- stream_cfg.stream());
+ gst_video_info_init(&info);
+ gst_video_info_from_caps(&info, caps);
+
+		/* Check for a mismatch between the camera stride and the stride
+		 * GStreamer computed from the caps. Encoded and unknown formats
+		 * carry no stride information and are left untouched. */
+		if (static_cast<unsigned int>(info.stride[0]) != stream_cfg.stride &&
+		    GST_VIDEO_INFO_FORMAT(&info) != GST_VIDEO_FORMAT_ENCODED &&
+		    GST_VIDEO_INFO_FORMAT(&info) != GST_VIDEO_FORMAT_UNKNOWN) {
+ GstQuery *query = NULL;
+ gboolean need_pool = true;
+
+ query = gst_query_new_allocation(caps, need_pool);
+ if (!gst_pad_peer_query(srcpad, query))
+ GST_DEBUG_OBJECT(self, "Didn't get downstream ALLOCATION hints");
+ else
+ add_video_meta = gst_query_find_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
+
+ if (add_video_meta) {
+ guint k, stride;
+ gsize offset = 0;
+
+				/* This should be updated if tiled formats are added in the future. */
+ for (k = 0; k < GST_VIDEO_INFO_N_PLANES(&info); k++) {
+ stride = gst_video_format_info_extrapolate_stride(info.finfo, k, stream_cfg.stride);
+ info.stride[k] = stride;
+ info.offset[k] = offset;
+ offset += stride * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info.finfo, k,
+ GST_VIDEO_INFO_HEIGHT(&info));
+ }
+ } else {
+ GstBufferPool *pool = NULL;
+ self->frame_copy = true;
+
+ if (gst_query_get_n_allocation_pools(query) > 0)
+ gst_query_parse_nth_allocation_pool(query, 0, &pool, NULL, NULL, NULL);
+
+ if (self->pool)
+ gst_object_unref(self->pool);
+
+			if (pool) {
+				self->pool = pool;
+			} else {
+ GstStructure *config;
+ guint min_buffers = 3;
+ self->pool = gst_video_buffer_pool_new();
+
+				config = gst_buffer_pool_get_config(self->pool);
+				gst_buffer_pool_config_set_params(config, caps, info.size, min_buffers, 0);
+				GST_DEBUG_OBJECT(self, "Own pool config is %" GST_PTR_FORMAT, config);
+				/* gst_buffer_pool_set_config() takes ownership of config. */
+				gst_buffer_pool_set_config(GST_BUFFER_POOL_CAST(self->pool), config);
+ }
+
+			if (!gst_buffer_pool_set_active(self->pool, true)) {
+				GST_ERROR_OBJECT(self, "Failed to activate buffer pool");
+				gst_query_unref(query);
+				return false;
+			}
+ }
+ gst_query_unref(query);
+ }
+
+ self->info = info;
+ GstLibcameraPool *pool = gst_libcamera_pool_new(self->allocator, stream_cfg.stream(),
+ &info, add_video_meta);
g_signal_connect_swapped(pool, "buffer-notify",
G_CALLBACK(gst_task_resume), self->task);
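
The gst_query_find_allocation_meta() check above succeeds only when the peer declared GstVideoMeta support while answering the ALLOCATION query. A sketch of the downstream side for a GstBaseSink-derived element; example_sink_propose_allocation() is an illustrative name, not part of the patch:

#include <gst/base/gstbasesink.h>
#include <gst/video/video.h>

/* Sketch only: advertising GstVideoMeta in propose_allocation() lets
 * libcamerasrc attach the meta instead of copying frames. */
static gboolean
example_sink_propose_allocation(GstBaseSink *sink, GstQuery *query)
{
	GST_DEBUG_OBJECT(sink, "advertising GstVideoMeta support");
	gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);

	return TRUE;
}

Sinks that do not advertise the meta take the new copy path instead, with frames copied into buffers acquired from the downstream or internal pool.
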
@@ -842,6 +970,12 @@ gst_libcamera_src_finalize(GObject *object)
GObjectClass *klass = G_OBJECT_CLASS(gst_libcamera_src_parent_class);
GstLibcameraSrc *self = GST_LIBCAMERA_SRC(object);
+ if (self->pool) {
+ gst_buffer_pool_set_active(self->pool, false);
+ gst_object_unref(self->pool);
+ self->pool = NULL;
+ }
+
g_rec_mutex_clear(&self->stream_lock);
g_clear_object(&self->task);
g_mutex_clear(&self->state->lock_);
@@ -875,6 +1009,8 @@ gst_libcamera_src_init(GstLibcameraSrc *self)
/* C-style friend. */
state->src_ = self;
self->state = state;
+ self->frame_copy = false;
+ self->pool = NULL;
}
static GstPad *
GStreamer video-info calculated stride and offset may differ from those used by the camera. For stride and offset mismatch, this patch adds video meta to buffer if downstream supports VideoMeta through allocation query. Otherwise, get downstream provided buffer pool or create internal buffer pool using the caps, and copy video frame to the system memory acquired from the buffer pool. Signed-off-by: Hou Qi <qi.hou@nxp.com> --- src/gstreamer/gstlibcamera-utils.cpp | 33 +++++++ src/gstreamer/gstlibcamera-utils.h | 4 + src/gstreamer/gstlibcamerapool.cpp | 9 +- src/gstreamer/gstlibcamerapool.h | 3 +- src/gstreamer/gstlibcamerasrc.cpp | 142 ++++++++++++++++++++++++++- 5 files changed, 186 insertions(+), 5 deletions(-)