Message ID: 20210906020100.14430-1-laurent.pinchart@ideasonboard.com
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > The inode is useful to check if two file descriptors refer to the same > file. Add a function to retrieve it. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com> Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > --- > Changes since v1: > > - Use isValid() instead of open-coding it > - Print a message on error > --- > include/libcamera/file_descriptor.h | 3 +++ > src/libcamera/file_descriptor.cpp | 26 ++++++++++++++++++++++++++ > 2 files changed, 29 insertions(+) > > diff --git a/include/libcamera/file_descriptor.h b/include/libcamera/file_descriptor.h > index d514aac7697b..988f9b7a3d25 100644 > --- a/include/libcamera/file_descriptor.h > +++ b/include/libcamera/file_descriptor.h > @@ -8,6 +8,7 @@ > #define __LIBCAMERA_FILE_DESCRIPTOR_H__ > > #include <memory> > +#include <sys/types.h> > > namespace libcamera { > > @@ -27,6 +28,8 @@ public: > int fd() const { return fd_ ? fd_->fd() : -1; } > FileDescriptor dup() const; > > + ino_t inode() const; > + > private: > class Descriptor > { > diff --git a/src/libcamera/file_descriptor.cpp b/src/libcamera/file_descriptor.cpp > index 9f9ebc81f738..0409c3e1758c 100644 > --- a/src/libcamera/file_descriptor.cpp > +++ b/src/libcamera/file_descriptor.cpp > @@ -8,6 +8,8 @@ > #include <libcamera/file_descriptor.h> > > #include <string.h> > +#include <sys/stat.h> > +#include <sys/types.h> > #include <unistd.h> > #include <utility> > > @@ -221,6 +223,30 @@ FileDescriptor FileDescriptor::dup() const > return FileDescriptor(fd()); > } > > +/** > + * \brief Retrieve the file descriptor inode > + * > + * \todo Should this move to the File class ? > + * > + * \return The file descriptor inode on success, or 0 on error > + */ > +ino_t FileDescriptor::inode() const > +{ > + if (!isValid()) > + return 0; > + > + struct stat st; > + int ret = fstat(fd_->fd(), &st); > + if (ret < 0) { > + ret = -errno; > + LOG(FileDescriptor, Fatal) > + << "Failed to fstat() fd: " << strerror(-ret); > + return 0; > + } > + > + return st.st_ino; > +} > + > FileDescriptor::Descriptor::Descriptor(int fd, bool duplicate) > { > if (!duplicate) { >
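For illustration, a minimal sketch of how the new helper can be used to tell whether two descriptors refer to the same file, for instance the same dmabuf (sameFile() is a hypothetical helper, not part of the patch):

#include <libcamera/file_descriptor.h>

using namespace libcamera;

/* True if both descriptors are valid and refer to the same file. */
static bool sameFile(const FileDescriptor &a, const FileDescriptor &b)
{
        if (!a.isValid() || !b.isValid())
                return false;

        /* A dup()'ed fd has a different numerical value but the same inode. */
        return a.fd() == b.fd() || a.inode() == b.inode();
}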
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > Replace manual looked for V4L2 pixel format in the PixelFormatInfo with > the V4L2PixelFormat::fromPixelFormat() helper function. This prepares > for multi-planar support that will modify how V4L2 pixel formats are > stored in PixelFormatInfo. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > --- > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +--- > src/v4l2/v4l2_camera_proxy.cpp | 9 +++------ > 2 files changed, 4 insertions(+), 9 deletions(-) > > diff --git a/src/libcamera/pipeline/ipu3/cio2.cpp b/src/libcamera/pipeline/ipu3/cio2.cpp > index 9cedcb5b2879..dc62ab197acb 100644 > --- a/src/libcamera/pipeline/ipu3/cio2.cpp > +++ b/src/libcamera/pipeline/ipu3/cio2.cpp > @@ -203,9 +203,7 @@ int CIO2Device::configure(const Size &size, V4L2DeviceFormat *outputFormat) > if (itInfo == mbusCodesToPixelFormat.end()) > return -EINVAL; > > - const PixelFormatInfo &info = PixelFormatInfo::info(itInfo->second); > - > - outputFormat->fourcc = info.v4l2Format; > + outputFormat->fourcc = V4L2PixelFormat::fromPixelFormat(itInfo->second); > outputFormat->size = sensorFormat.size; > outputFormat->planesCount = 1; > > diff --git a/src/v4l2/v4l2_camera_proxy.cpp b/src/v4l2/v4l2_camera_proxy.cpp > index 07b1a90aa32f..d926a7b77083 100644 > --- a/src/v4l2/v4l2_camera_proxy.cpp > +++ b/src/v4l2/v4l2_camera_proxy.cpp > @@ -164,12 +164,11 @@ bool V4L2CameraProxy::validateMemoryType(uint32_t memory) > > void V4L2CameraProxy::setFmtFromConfig(const StreamConfiguration &streamConfig) > { > - const PixelFormatInfo &info = PixelFormatInfo::info(streamConfig.pixelFormat); > const Size &size = streamConfig.size; > > v4l2PixFormat_.width = size.width; > v4l2PixFormat_.height = size.height; > - v4l2PixFormat_.pixelformat = info.v4l2Format; > + v4l2PixFormat_.pixelformat = V4L2PixelFormat::fromPixelFormat(streamConfig.pixelFormat); > v4l2PixFormat_.field = V4L2_FIELD_NONE; > v4l2PixFormat_.bytesperline = streamConfig.stride; > v4l2PixFormat_.sizeimage = streamConfig.frameSize; > @@ -276,7 +275,7 @@ int V4L2CameraProxy::vidioc_enum_fmt(V4L2CameraFile *file, struct v4l2_fmtdesc * > /* \todo Add map from format to description. */ > utils::strlcpy(reinterpret_cast<char *>(arg->description), > "Video Format Description", sizeof(arg->description)); > - arg->pixelformat = PixelFormatInfo::info(format).v4l2Format; > + arg->pixelformat = V4L2PixelFormat::fromPixelFormat(format); > > memset(arg->reserved, 0, sizeof(arg->reserved)); > > @@ -311,11 +310,9 @@ int V4L2CameraProxy::tryFormat(struct v4l2_format *arg) > return -EINVAL; > } > > - const PixelFormatInfo &info = PixelFormatInfo::info(config.pixelFormat); > - > arg->fmt.pix.width = config.size.width; > arg->fmt.pix.height = config.size.height; > - arg->fmt.pix.pixelformat = info.v4l2Format; > + arg->fmt.pix.pixelformat = V4L2PixelFormat::fromPixelFormat(config.pixelFormat); > arg->fmt.pix.field = V4L2_FIELD_NONE; > arg->fmt.pix.bytesperline = config.stride; > arg->fmt.pix.sizeimage = config.frameSize; >
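For reference, the new call pattern in isolation; a sketch only, formats::NV12 is just an arbitrary example:

#include <libcamera/formats.h>

#include "libcamera/internal/v4l2_pixelformat.h"

using namespace libcamera;

/* Map a libcamera PixelFormat to the V4L2 FourCC expected by the kernel. */
V4L2PixelFormat fourcc = V4L2PixelFormat::fromPixelFormat(formats::NV12);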
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > Multi-planar frame buffers can store their planes contiguously in > memory, or split them in discontiguous memory areas. Add a private > function to check in which of these two categories the frame buffer > belongs. This will be used to correctly handle the differences between > the V4L2 single and multi planar APIs. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com> > Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > --- > Changes v1: > > - Merge both loops in FrameBuffer::FrameBuffer() > --- > include/libcamera/internal/framebuffer.h | 2 ++ > src/libcamera/framebuffer.cpp | 45 ++++++++++++++++++++++-- > 2 files changed, 45 insertions(+), 2 deletions(-) > > diff --git a/include/libcamera/internal/framebuffer.h b/include/libcamera/internal/framebuffer.h > index 606aed2b4782..cd33c295466e 100644 > --- a/include/libcamera/internal/framebuffer.h > +++ b/include/libcamera/internal/framebuffer.h > @@ -21,9 +21,11 @@ public: > Private(); > > void setRequest(Request *request) { request_ = request; } > + bool isContiguous() const { return isContiguous_; } > > private: > Request *request_; > + bool isContiguous_; > }; > > } /* namespace libcamera */ > diff --git a/src/libcamera/framebuffer.cpp b/src/libcamera/framebuffer.cpp > index ad63a34a83bf..e71c2ffae034 100644 > --- a/src/libcamera/framebuffer.cpp > +++ b/src/libcamera/framebuffer.cpp > @@ -106,7 +106,7 @@ LOG_DEFINE_CATEGORY(Buffer) > */ > > FrameBuffer::Private::Private() > - : request_(nullptr) > + : request_(nullptr), isContiguous_(true) > { > } > > @@ -120,6 +120,17 @@ FrameBuffer::Private::Private() > * handlers, it is called by the pipeline handlers themselves. > */ > > +/** > + * \fn FrameBuffer::Private::isContiguous() > + * \brief Check if the frame buffer stores planes contiguously in memory > + * > + * Multi-planar frame buffers can store their planes contiguously in memory, or > + * split them into discontiguous memory areas. This function checks in which of > + * these two categories the frame buffer belongs. > + * > + * \return True if the planes are stored contiguously in memory, false otherwise > + */ > + > /** > * \class FrameBuffer > * \brief Frame buffer data and its associated dynamic metadata > @@ -199,8 +210,38 @@ FrameBuffer::FrameBuffer(const std::vector<Plane> &planes, unsigned int cookie) > : Extensible(std::make_unique<Private>()), planes_(planes), > cookie_(cookie) > { > - for (const auto &plane : planes_) > + unsigned int offset = 0; > + bool isContiguous = true; > + ino_t inode = 0; > + > + for (const auto &plane : planes_) { > ASSERT(plane.offset != Plane::kInvalidOffset); > + > + if (plane.offset != offset) { > + isContiguous = false; > + break; > + } > + > + /* > + * Two different dmabuf file descriptors may still refer to the > + * same dmabuf instance. Check this using inodes. > + */ How is that possible ? > + if (plane.fd.fd() != planes_[0].fd.fd()) { > + if (!inode) > + inode = planes_[0].fd.inode(); > + if (plane.fd.inode() != inode) { > + isContiguous = false; > + break; > + } > + } > + > + offset += plane.length; > + } > + > + LOG(Buffer, Debug) > + << "Buffer is " << (isContiguous ? "not " : "") << "contiguous"; > + > + _d()->isContiguous_ = isContiguous; > } > > /** >
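On the question above: two numerically different fds refer to the same dmabuf whenever one is a dup() of the other, for instance when every plane of a buffer carries its own duplicate of the fd of a single exported dmabuf. A minimal sketch, assuming a 640x480 NV12 buffer backed by one dmabuf (makeNV12Buffer() and dmabufFd are illustrative names, not part of the patch):

#include <memory>
#include <vector>

#include <libcamera/file_descriptor.h>
#include <libcamera/framebuffer.h>

using namespace libcamera;

static std::unique_ptr<FrameBuffer> makeNV12Buffer(int dmabufFd)
{
        std::vector<FrameBuffer::Plane> planes(2);

        planes[0].fd = FileDescriptor(dmabufFd);   /* dup()s dmabufFd */
        planes[0].offset = 0;
        planes[0].length = 640 * 480;              /* Y plane */

        planes[1].fd = FileDescriptor(dmabufFd);   /* a second, distinct dup() */
        planes[1].offset = 640 * 480;
        planes[1].length = 640 * 480 / 2;          /* interleaved UV plane */

        /*
         * The two fds differ numerically but share an inode, and the offsets
         * line up, so the constructor considers the buffer contiguous.
         */
        return std::make_unique<FrameBuffer>(planes);
}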
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > When creating FrameBuffer instances, the V4L2VideoDevice computes plane > offsets using minimal stride for the format. This doesn't always produce > a valid result when the device requires padding at the end of lines. Fix > it by computing offsets using the stride reported by V4L2. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > --- > src/libcamera/v4l2_videodevice.cpp | 16 +++++++++++++--- > 1 file changed, 13 insertions(+), 3 deletions(-) > > diff --git a/src/libcamera/v4l2_videodevice.cpp b/src/libcamera/v4l2_videodevice.cpp > index 88535f5a07c7..c6c9263c49e9 100644 > --- a/src/libcamera/v4l2_videodevice.cpp > +++ b/src/libcamera/v4l2_videodevice.cpp > @@ -1354,11 +1354,21 @@ std::unique_ptr<FrameBuffer> V4L2VideoDevice::createBuffer(unsigned int index) > size_t offset = 0; > > for (size_t i = 0; i < planes.size(); ++i) { > + /* > + * The stride is reported by V4L2 for the first plane > + * only. Compute the stride of the other planes by > + * taking the horizontal subsampling factor into > + * account, which is equal to the bytesPerGroup ratio of > + * the planes. > + */ > + unsigned int stride = format_.planes[0].bpl > + * formatInfo_->planes[i].bytesPerGroup > + / formatInfo_->planes[0].bytesPerGroup; > + > planes[i].fd = fd; > planes[i].offset = offset; > - > - /* \todo Take the V4L2 stride into account */ > - planes[i].length = formatInfo_->planeSize(format_.size, i); > + planes[i].length = formatInfo_->planeSize(format_.size.height, > + i, stride); > offset += planes[i].length; > } > } >
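A worked example of the formula, assuming the bytesPerGroup values from the in-tree PixelFormatInfo entries and a 640-pixel-wide frame where V4L2 reports bpl = 640 for the first plane:

- NV12: both planes have bytesPerGroup = 2, so the chroma plane stride equals the luma stride: 640 * 2 / 2 = 640.
- YUV420: the chroma planes have bytesPerGroup = 1, so their stride is half the luma stride: 640 * 1 / 2 = 320.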
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > The metadata planes are allocated by V4L2VideoDevice when dequeuing a > buffer. This causes the metadata planes to only be allocated after a > buffer gets dequeued, and doesn't provide any strong guarantee that > their number matches the number of FrameBuffer planes. The lack of this > invariant makes the FrameBuffer class fragile. > > As a first step towards fixing this, allocate the metadata planes when > the FrameBuffer is constructed. The FrameMetadata API should be further > improved by preventing a change in the number of planes. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > --- > Changes since v1: > > - Return buffer with state set to FrameError on error > --- > src/libcamera/framebuffer.cpp | 2 ++ > src/libcamera/v4l2_videodevice.cpp | 10 +++++----- > 2 files changed, 7 insertions(+), 5 deletions(-) > > diff --git a/src/libcamera/framebuffer.cpp b/src/libcamera/framebuffer.cpp > index e71c2ffae034..e4f8419a9063 100644 > --- a/src/libcamera/framebuffer.cpp > +++ b/src/libcamera/framebuffer.cpp > @@ -210,6 +210,8 @@ FrameBuffer::FrameBuffer(const std::vector<Plane> &planes, unsigned int cookie) > : Extensible(std::make_unique<Private>()), planes_(planes), > cookie_(cookie) > { > + metadata_.planes.resize(planes_.size()); > + > unsigned int offset = 0; > bool isContiguous = true; > ino_t inode = 0; > diff --git a/src/libcamera/v4l2_videodevice.cpp b/src/libcamera/v4l2_videodevice.cpp > index 59aa53c7c27e..e729e608448c 100644 > --- a/src/libcamera/v4l2_videodevice.cpp > +++ b/src/libcamera/v4l2_videodevice.cpp > @@ -1670,7 +1670,6 @@ FrameBuffer *V4L2VideoDevice::dequeueBuffer() > > unsigned int numV4l2Planes = multiPlanar ? buf.length : 1; > FrameMetadata &metadata = buffer->metadata_; > - metadata.planes.clear(); > > if (numV4l2Planes != buffer->planes().size()) { > /* > @@ -1700,8 +1699,9 @@ FrameBuffer *V4L2VideoDevice::dequeueBuffer() > return buffer; > } > > - metadata.planes.push_back({ std::min(plane.length, bytesused) }); > - bytesused -= metadata.planes.back().bytesused; > + metadata.planes[i].bytesused = > + std::min(plane.length, bytesused); > + bytesused -= metadata.planes[i].bytesused; > } > } else if (multiPlanar) { > /* > @@ -1710,9 +1710,9 @@ FrameBuffer *V4L2VideoDevice::dequeueBuffer() > * V4L2 buffer is guaranteed to be equal at this point. > */ > for (unsigned int i = 0; i < numV4l2Planes; ++i) > - metadata.planes.push_back({ planes[i].bytesused }); > + metadata.planes[i].bytesused = planes[i].bytesused; > } else { > - metadata.planes.push_back({ buf.bytesused }); > + metadata.planes[0].bytesused = buf.bytesused; > } > > return buffer; >
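To make the splitting in the first branch concrete, with illustrative numbers: a 640x480 NV12 buffer dequeued through the single-planar API reports one bytesused value of 460800, while the FrameBuffer has two planes of 307200 and 153600 bytes. The loop assigns min(307200, 460800) = 307200 to plane 0, leaving 153600, then min(153600, 153600) = 153600 to plane 1, so the per-plane metadata adds up to the value reported by V4L2.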
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > Now that libcamera correctly supports frame buffers with different > dmabuf for each plane, remove the assumption that a single dmabuf is > used. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > --- > src/android/camera_device.cpp | 25 ++++++------------------- > 1 file changed, 6 insertions(+), 19 deletions(-) > > diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp > index 8ca76719a50f..c64064106ccc 100644 > --- a/src/android/camera_device.cpp > +++ b/src/android/camera_device.cpp > @@ -749,25 +749,6 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer > libcamera::PixelFormat pixelFormat, > const libcamera::Size &size) > { > - FileDescriptor fd; > - /* > - * This assumes all the planes are in the same dmabuf. > - * > - * \todo Verify that this assumption holds, fstat() can be used to check > - * if two fds refer to the same dmabuf. > - */ > - for (int i = 0; i < camera3buffer->numFds; i++) { > - if (camera3buffer->data[i] != -1) { > - fd = FileDescriptor(camera3buffer->data[i]); > - break; > - } > - } > - > - if (!fd.isValid()) { > - LOG(HAL, Fatal) << "No valid fd"; > - return nullptr; > - } > - > CameraBuffer buf(camera3buffer, pixelFormat, size, PROT_READ); > if (!buf.isValid()) { > LOG(HAL, Fatal) << "Failed to create CameraBuffer"; > @@ -776,6 +757,12 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer > > std::vector<FrameBuffer::Plane> planes(buf.numPlanes()); > for (size_t i = 0; i < buf.numPlanes(); ++i) { > + FileDescriptor fd{ camera3buffer->data[i] }; > + if (!fd.isValid()) { > + LOG(HAL, Fatal) << "No valid fd"; > + return nullptr; > + } > + > planes[i].fd = fd; > planes[i].offset = buf.offset(i); > planes[i].length = buf.size(i); >
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > The new Image class represents a multi-planar image with direct access > to pixel data. It currently duplicates the function of the > MappedFrameBuffer class which is internal to libcamera, and will serve > as a design playground to improve the API until it is considered ready > to be made part of the libcamera public API. I like the idea, maybe add some documentation already in the class ? > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > --- > src/cam/image.cpp | 107 ++++++++++++++++++++++++++++++++++++++++++++ > src/cam/image.h | 52 +++++++++++++++++++++ > src/cam/meson.build | 1 + > 3 files changed, 160 insertions(+) > create mode 100644 src/cam/image.cpp > create mode 100644 src/cam/image.h > > diff --git a/src/cam/image.cpp b/src/cam/image.cpp > new file mode 100644 > index 000000000000..7ae5f52dccb4 > --- /dev/null > +++ b/src/cam/image.cpp > @@ -0,0 +1,107 @@ > +/* SPDX-License-Identifier: LGPL-2.1-or-later */ > +/* > + * Copyright (C) 2021, Google Inc. > + * > + * image.cpp - Multi-planar image with access to pixel data > + */ > + > +#include "image.h" > + > +#include <assert.h> > +#include <errno.h> > +#include <iostream> > +#include <map> > +#include <string.h> > +#include <sys/mman.h> > +#include <unistd.h> > + > +using namespace libcamera; > + > +std::unique_ptr<Image> Image::fromFrameBuffer(const FrameBuffer *buffer, MapMode mode) Can you see a use case for Image::toFrameBuffer not implemented yet ? What would be in this class apart from a conversion from a FrameBuffer to a Image ? > +{ > + std::unique_ptr<Image> image{ new Image() }; > + > + assert(!buffer->planes().empty()); > + > + int mmapFlags = 0; > + > + if (mode & MapMode::ReadOnly) > + mmapFlags |= PROT_READ; > + > + if (mode & MapMode::WriteOnly) > + mmapFlags |= PROT_WRITE; > + > + struct MappedBufferInfo { > + uint8_t *address = nullptr; > + size_t mapLength = 0; > + size_t dmabufLength = 0; > + }; > + std::map<int, MappedBufferInfo> mappedBuffers; > + > + for (const FrameBuffer::Plane &plane : buffer->planes()) { > + const int fd = plane.fd.fd(); > + if (mappedBuffers.find(fd) == mappedBuffers.end()) { > + const size_t length = lseek(fd, 0, SEEK_END); > + mappedBuffers[fd] = MappedBufferInfo{ nullptr, 0, length }; > + } > + > + const size_t length = mappedBuffers[fd].dmabufLength; > + > + if (plane.offset > length || > + plane.offset + plane.length > length) { > + std::cerr << "plane is out of buffer: buffer length=" > + << length << ", plane offset=" << plane.offset > + << ", plane length=" << plane.length > + << std::endl; > + return nullptr; > + } > + size_t &mapLength = mappedBuffers[fd].mapLength; > + mapLength = std::max(mapLength, > + static_cast<size_t>(plane.offset + plane.length)); > + } > + > + for (const FrameBuffer::Plane &plane : buffer->planes()) { > + const int fd = plane.fd.fd(); > + auto &info = mappedBuffers[fd]; > + if (!info.address) { > + void *address = mmap(nullptr, info.mapLength, mmapFlags, > + MAP_SHARED, fd, 0); > + if (address == MAP_FAILED) { > + int error = -errno; > + std::cerr << "Failed to mmap plane: " > + << strerror(-error) << std::endl; > + return nullptr; > + } > + > + info.address = static_cast<uint8_t *>(address); > + image->maps_.emplace_back(info.address, info.mapLength); > + } > + > + image->planes_.emplace_back(info.address + plane.offset, plane.length); > + } > + Why are you using two loops on buffer->planes() ? Is it for code clarity or something I did not get ? 
> + return image; > +} > + > +Image::Image() = default; > + > +Image::~Image() > +{ > + for (Span<uint8_t> &map : maps_) > + munmap(map.data(), map.size()); > +} > + > +unsigned int Image::numPlanes() const > +{ > + return planes_.size(); > +} > + > +Span<uint8_t> Image::data(unsigned int plane) > +{ > + return planes_[plane]; > +} > + > +Span<const uint8_t> Image::data(unsigned int plane) const > +{ > + return planes_[plane]; > +} > diff --git a/src/cam/image.h b/src/cam/image.h > new file mode 100644 > index 000000000000..1ce5f84e5f9e > --- /dev/null > +++ b/src/cam/image.h > @@ -0,0 +1,52 @@ > +/* SPDX-License-Identifier: LGPL-2.1-or-later */ > +/* > + * Copyright (C) 2021, Google Inc. > + * > + * image.h - Multi-planar image with access to pixel data > + */ > +#ifndef __CAM_IMAGE_H__ > +#define __CAM_IMAGE_H__ > + > +#include <memory> > +#include <stdint.h> > +#include <vector> > + > +#include <libcamera/base/class.h> > +#include <libcamera/base/flags.h> > +#include <libcamera/base/span.h> > + > +#include <libcamera/framebuffer.h> > + > +class Image > +{ > +public: > + enum class MapMode { > + ReadOnly = 1 << 0, > + WriteOnly = 1 << 1, > + ReadWrite = ReadOnly | WriteOnly, > + }; > + > + static std::unique_ptr<Image> fromFrameBuffer(const libcamera::FrameBuffer *buffer, > + MapMode mode); > + > + ~Image(); > + > + unsigned int numPlanes() const; > + > + libcamera::Span<uint8_t> data(unsigned int plane); > + libcamera::Span<const uint8_t> data(unsigned int plane) const; > + > +private: > + LIBCAMERA_DISABLE_COPY(Image) > + > + Image(); > + > + std::vector<libcamera::Span<uint8_t>> maps_; > + std::vector<libcamera::Span<uint8_t>> planes_; > +}; > + > +namespace libcamera { > +LIBCAMERA_FLAGS_ENABLE_OPERATORS(Image::MapMode) > +} > + > +#endif /* __CAM_IMAGE_H__ */ > diff --git a/src/cam/meson.build b/src/cam/meson.build > index ea36aaa5c514..e8e2ae57d3f4 100644 > --- a/src/cam/meson.build > +++ b/src/cam/meson.build > @@ -14,6 +14,7 @@ cam_sources = files([ > 'event_loop.cpp', > 'file_sink.cpp', > 'frame_sink.cpp', > + 'image.cpp', > 'main.cpp', > 'options.cpp', > 'stream_options.cpp', >
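For readers following along, a minimal usage sketch of the class as proposed (dumpPlanes() is a hypothetical helper and the buffer is assumed to come from a completed request; error handling elided):

#include <iostream>
#include <memory>

#include <libcamera/framebuffer.h>

#include "image.h"

static void dumpPlanes(const libcamera::FrameBuffer *buffer)
{
        std::unique_ptr<Image> image =
                Image::fromFrameBuffer(buffer, Image::MapMode::ReadOnly);
        if (!image)
                return;

        for (unsigned int i = 0; i < image->numPlanes(); ++i) {
                libcamera::Span<uint8_t> plane = image->data(i);
                std::cout << "plane " << i << ": " << plane.size()
                          << " bytes" << std::endl;
        }
}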
Hi Laurent, On Mon, Sep 06, 2021 at 05:00:35AM +0300, Laurent Pinchart wrote: > The inode is useful to check if two file descriptors refer to the same > file. Add a function to retrieve it. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com> Reviewed-by: Paul Elder <paul.elder@ideasonboard.com> > --- > Changes since v1: > > - Use isValid() instead of open-coding it > - Print a message on error > --- > include/libcamera/file_descriptor.h | 3 +++ > src/libcamera/file_descriptor.cpp | 26 ++++++++++++++++++++++++++ > 2 files changed, 29 insertions(+) > > diff --git a/include/libcamera/file_descriptor.h b/include/libcamera/file_descriptor.h > index d514aac7697b..988f9b7a3d25 100644 > --- a/include/libcamera/file_descriptor.h > +++ b/include/libcamera/file_descriptor.h > @@ -8,6 +8,7 @@ > #define __LIBCAMERA_FILE_DESCRIPTOR_H__ > > #include <memory> > +#include <sys/types.h> > > namespace libcamera { > > @@ -27,6 +28,8 @@ public: > int fd() const { return fd_ ? fd_->fd() : -1; } > FileDescriptor dup() const; > > + ino_t inode() const; > + > private: > class Descriptor > { > diff --git a/src/libcamera/file_descriptor.cpp b/src/libcamera/file_descriptor.cpp > index 9f9ebc81f738..0409c3e1758c 100644 > --- a/src/libcamera/file_descriptor.cpp > +++ b/src/libcamera/file_descriptor.cpp > @@ -8,6 +8,8 @@ > #include <libcamera/file_descriptor.h> > > #include <string.h> > +#include <sys/stat.h> > +#include <sys/types.h> > #include <unistd.h> > #include <utility> > > @@ -221,6 +223,30 @@ FileDescriptor FileDescriptor::dup() const > return FileDescriptor(fd()); > } > > +/** > + * \brief Retrieve the file descriptor inode > + * > + * \todo Should this move to the File class ? > + * > + * \return The file descriptor inode on success, or 0 on error > + */ > +ino_t FileDescriptor::inode() const > +{ > + if (!isValid()) > + return 0; > + > + struct stat st; > + int ret = fstat(fd_->fd(), &st); > + if (ret < 0) { > + ret = -errno; > + LOG(FileDescriptor, Fatal) > + << "Failed to fstat() fd: " << strerror(-ret); > + return 0; > + } > + > + return st.st_ino; > +} > + > FileDescriptor::Descriptor::Descriptor(int fd, bool duplicate) > { > if (!duplicate) { > -- > Regards, > > Laurent Pinchart >
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > Replace the manual implementation of frame buffer mapping with the Image > class to improve code sharing. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> It improves reading indeed ! Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > --- > src/cam/file_sink.cpp | 42 +++++++++++++----------------------------- > src/cam/file_sink.h | 6 ++++-- > 2 files changed, 17 insertions(+), 31 deletions(-) > > diff --git a/src/cam/file_sink.cpp b/src/cam/file_sink.cpp > index 0fc7d621f50b..3c2e565b27a2 100644 > --- a/src/cam/file_sink.cpp > +++ b/src/cam/file_sink.cpp > @@ -5,17 +5,18 @@ > * file_sink.cpp - File Sink > */ > > +#include <assert.h> > #include <fcntl.h> > #include <iomanip> > #include <iostream> > #include <sstream> > #include <string.h> > -#include <sys/mman.h> > #include <unistd.h> > > #include <libcamera/camera.h> > > #include "file_sink.h" > +#include "image.h" > > using namespace libcamera; > > @@ -26,12 +27,6 @@ FileSink::FileSink(const std::string &pattern) > > FileSink::~FileSink() > { > - for (auto &iter : mappedBuffers_) { > - void *memory = iter.second.first; > - unsigned int length = iter.second.second; > - munmap(memory, length); > - } > - mappedBuffers_.clear(); > } > > int FileSink::configure(const libcamera::CameraConfiguration &config) > @@ -51,23 +46,11 @@ int FileSink::configure(const libcamera::CameraConfiguration &config) > > void FileSink::mapBuffer(FrameBuffer *buffer) > { > - /* \todo use MappedFrameBuffer. */ > - for (const FrameBuffer::Plane &plane : buffer->planes()) { > - const int fd = plane.fd.fd(); > - if (mappedBuffers_.find(fd) == mappedBuffers_.end()) { > - /** > - * \todo Should we try to only map the portions of the > - * dmabuf that are used by planes ? 
> - */ > - size_t length = lseek(fd, 0, SEEK_END); > - void *memory = mmap(NULL, plane.length, PROT_READ, > - MAP_SHARED, fd, 0); > - mappedBuffers_[fd] = std::make_pair(memory, length); > - } > + std::unique_ptr<Image> image = > + Image::fromFrameBuffer(buffer, Image::MapMode::ReadOnly); > + assert(image != nullptr); > > - void *memory = mappedBuffers_[fd].first; > - planeData_[&plane] = static_cast<uint8_t *>(memory) + plane.offset; > - } > + mappedBuffers_[buffer] = std::move(image); > } > > bool FileSink::processRequest(Request *request) > @@ -108,19 +91,20 @@ void FileSink::writeBuffer(const Stream *stream, FrameBuffer *buffer) > return; > } > > + Image *image = mappedBuffers_[buffer].get(); > + > for (unsigned int i = 0; i < buffer->planes().size(); ++i) { > - const FrameBuffer::Plane &plane = buffer->planes()[i]; > const FrameMetadata::Plane &meta = buffer->metadata().planes()[i]; > > - uint8_t *data = planeData_[&plane]; > - unsigned int length = std::min(meta.bytesused, plane.length); > + Span<uint8_t> data = image->data(i); > + unsigned int length = std::min<unsigned int>(meta.bytesused, data.size()); > > - if (meta.bytesused > plane.length) > + if (meta.bytesused > data.size()) > std::cerr << "payload size " << meta.bytesused > - << " larger than plane size " << plane.length > + << " larger than plane size " << data.size() > << std::endl; > > - ret = ::write(fd, data, length); > + ret = ::write(fd, data.data(), length); > if (ret < 0) { > ret = -errno; > std::cerr << "write error: " << strerror(-ret) > diff --git a/src/cam/file_sink.h b/src/cam/file_sink.h > index c12325d955c5..335be93b8732 100644 > --- a/src/cam/file_sink.h > +++ b/src/cam/file_sink.h > @@ -8,12 +8,15 @@ > #define __CAM_FILE_SINK_H__ > > #include <map> > +#include <memory> > #include <string> > > #include <libcamera/stream.h> > > #include "frame_sink.h" > > +class Image; > + > class FileSink : public FrameSink > { > public: > @@ -32,8 +35,7 @@ private: > > std::map<const libcamera::Stream *, std::string> streamNames_; > std::string pattern_; > - std::map<int, std::pair<void *, unsigned int>> mappedBuffers_; > - std::map<const libcamera::FrameBuffer::Plane *, uint8_t *> planeData_; > + std::map<libcamera::FrameBuffer *, std::unique_ptr<Image>> mappedBuffers_; > }; > > #endif /* __CAM_FILE_SINK_H__ */ >
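One detail worth calling out for anyone reusing this pattern elsewhere: the write length is clamped to the mapped plane size, since the metadata bytesused can be larger than the plane when a driver misreports the payload (the existing error message covers that case). A compact sketch of just that part (writePlanes() and fd are illustrative, not part of the patch):

#include <algorithm>
#include <unistd.h>

#include <libcamera/framebuffer.h>

#include "image.h"

static void writePlanes(int fd, const libcamera::FrameBuffer *buffer, Image *image)
{
        for (unsigned int i = 0; i < buffer->planes().size(); ++i) {
                const libcamera::FrameMetadata::Plane &meta =
                        buffer->metadata().planes()[i];
                libcamera::Span<uint8_t> data = image->data(i);

                /* Never write past the mapped plane, whatever bytesused says. */
                unsigned int length =
                        std::min<unsigned int>(meta.bytesused, data.size());
                ::write(fd, data.data(), length);
        }
}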
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > The stride is not always identical for all planes for multi-planar > formats. Semi-planar YUV formats without horizontal subsampling often > have a chroma stride equal to twice the luma stride, and tri-planar YUV > formats with a 1/2 horizontal subsampling often have a chroma stride > equal to half the luma stride. This isn't correctly taken into account > when creating a DRM frame buffer, as the same stride is set for all > planes. > > libcamera doesn't report per-plane stride values yet, but uses chroma > strides that match the above description for all currently supported > platforms. Calculation the chrome strides appropriately in the KMSSink > class, and pass them to DRM::createFrameBuffer(). > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > --- > src/cam/drm.cpp | 7 +++---- > src/cam/drm.h | 5 ++++- > src/cam/kms_sink.cpp | 28 +++++++++++++++++++++++++++- > 3 files changed, 34 insertions(+), 6 deletions(-) > > diff --git a/src/cam/drm.cpp b/src/cam/drm.cpp > index da317e27cb19..ac47b8bd3287 100644 > --- a/src/cam/drm.cpp > +++ b/src/cam/drm.cpp > @@ -595,12 +595,12 @@ const Object *Device::object(uint32_t id) > std::unique_ptr<FrameBuffer> Device::createFrameBuffer( > const libcamera::FrameBuffer &buffer, > const libcamera::PixelFormat &format, > - const libcamera::Size &size, unsigned int stride) > + const libcamera::Size &size, > + const std::array<uint32_t, 4> &strides) > { > std::unique_ptr<FrameBuffer> fb{ new FrameBuffer(this) }; > > uint32_t handles[4] = {}; > - uint32_t pitches[4] = {}; > uint32_t offsets[4] = {}; > int ret; > > @@ -623,13 +623,12 @@ std::unique_ptr<FrameBuffer> Device::createFrameBuffer( > fb->planes_.push_back({ handle }); > > handles[i] = handle; > - pitches[i] = stride; > offsets[i] = 0; /* TODO */ > ++i; > } > > ret = drmModeAddFB2(fd_, size.width, size.height, format.fourcc(), handles, > - pitches, offsets, &fb->id_, 0); > + strides.data(), offsets, &fb->id_, 0); > if (ret < 0) { > ret = -errno; > std::cerr > diff --git a/src/cam/drm.h b/src/cam/drm.h > index ee2304025208..00f7e798b771 100644 > --- a/src/cam/drm.h > +++ b/src/cam/drm.h > @@ -7,9 +7,11 @@ > #ifndef __CAM_DRM_H__ > #define __CAM_DRM_H__ > > +#include <array> > #include <list> > #include <map> > #include <memory> > +#include <stdint.h> > #include <string> > #include <vector> > > @@ -298,7 +300,8 @@ public: > std::unique_ptr<FrameBuffer> createFrameBuffer( > const libcamera::FrameBuffer &buffer, > const libcamera::PixelFormat &format, > - const libcamera::Size &size, unsigned int stride); > + const libcamera::Size &size, > + const std::array<uint32_t, 4> &strides); > > libcamera::Signal<AtomicRequest *> requestComplete; > > diff --git a/src/cam/kms_sink.cpp b/src/cam/kms_sink.cpp > index 8c0b79c63922..658192efc105 100644 > --- a/src/cam/kms_sink.cpp > +++ b/src/cam/kms_sink.cpp > @@ -7,10 +7,12 @@ > > #include "kms_sink.h" > > +#include <array> > #include <algorithm> > #include <assert.h> > #include <iostream> > #include <memory> > +#include <stdint.h> > #include <string.h> > > #include <libcamera/camera.h> > @@ -65,8 +67,32 @@ KMSSink::KMSSink(const std::string &connectorName) > > void KMSSink::mapBuffer(libcamera::FrameBuffer *buffer) > { > + std::array<uint32_t, 4> strides = {}; > + > + /* \todo Should libcamera report per-plane strides ? 
*/ > + unsigned int uvStrideMultiplier; > + > + switch (format_) { > + case libcamera::formats::NV24: > + case libcamera::formats::NV42: > + uvStrideMultiplier = 4; > + break; > + case libcamera::formats::YUV420: > + case libcamera::formats::YVU420: > + case libcamera::formats::YUV422: > + uvStrideMultiplier = 1; > + break; > + default: > + uvStrideMultiplier = 2; > + break; > + } > + > + strides[0] = stride_; > + for (unsigned int i = 1; i < buffer->planes().size(); ++i) > + strides[i] = stride_ * uvStrideMultiplier / 2; > + > std::unique_ptr<DRM::FrameBuffer> drmBuffer = > - dev_.createFrameBuffer(*buffer, format_, size_, stride_); > + dev_.createFrameBuffer(*buffer, format_, size_, strides); > if (!drmBuffer) > return; > >
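Spelling out what the multiplier produces, assuming a 640-pixel-wide frame with stride_ = 640 for the luma plane:

- NV24/NV42 (no horizontal subsampling, interleaved UV): 640 * 4 / 2 = 1280, twice the luma stride.
- YUV420/YVU420/YUV422 (planar, 1/2 horizontal subsampling): 640 * 1 / 2 = 320, half the luma stride.
- Default, e.g. NV12/NV21 (interleaved UV, 1/2 horizontal subsampling): 640 * 2 / 2 = 640, the same stride as luma.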
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > Now that libcamera supports per-plane offsets, pass the values to > drmModeAddFB2(). > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > --- > src/cam/drm.cpp | 2 +- > 1 file changed, 1 insertion(+), 1 deletion(-) > > diff --git a/src/cam/drm.cpp b/src/cam/drm.cpp > index ac47b8bd3287..d5a75d039fd8 100644 > --- a/src/cam/drm.cpp > +++ b/src/cam/drm.cpp > @@ -623,7 +623,7 @@ std::unique_ptr<FrameBuffer> Device::createFrameBuffer( > fb->planes_.push_back({ handle }); > > handles[i] = handle; > - offsets[i] = 0; /* TODO */ > + offsets[i] = plane.offset; > ++i; > } > >
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > When creating a DRM frame buffer, the dmabufs for the planes are > imported as GEM objects. For multi-planar formats, all planes may use > the same dmabuf, which results in multiple imports. This doesn't cause > any issue at import time, as DRM detects this situation and returns the > same GEM object. However, when destroying the frame buffer, the same GEM > object ends up being closed multiple times, which generates an error. > > Fix this by avoiding multiple imports of the same dmabuf for the same > frame buffer. While the issue may theoretically occur with identical > dmabufs for different frame buffers, this is quite unlikely and is thus > not addressed. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > ---Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > src/cam/drm.cpp | 29 +++++++++++++++++------------ > src/cam/drm.h | 2 +- > 2 files changed, 18 insertions(+), 13 deletions(-) > > diff --git a/src/cam/drm.cpp b/src/cam/drm.cpp > index d5a75d039fd8..f25300913a7f 100644 > --- a/src/cam/drm.cpp > +++ b/src/cam/drm.cpp > @@ -283,9 +283,9 @@ FrameBuffer::FrameBuffer(Device *dev) > > FrameBuffer::~FrameBuffer() > { > - for (FrameBuffer::Plane &plane : planes_) { > + for (const auto &plane : planes_) { > struct drm_gem_close gem_close = { > - .handle = plane.handle, > + .handle = plane.second.handle, > .pad = 0, > }; > int ret; > @@ -605,22 +605,27 @@ std::unique_ptr<FrameBuffer> Device::createFrameBuffer( > int ret; > > const std::vector<libcamera::FrameBuffer::Plane> &planes = buffer.planes(); > - fb->planes_.reserve(planes.size()); > > unsigned int i = 0; > for (const libcamera::FrameBuffer::Plane &plane : planes) { > + int fd = plane.fd.fd(); > uint32_t handle; > > - ret = drmPrimeFDToHandle(fd_, plane.fd.fd(), &handle); > - if (ret < 0) { > - ret = -errno; > - std::cerr > - << "Unable to import framebuffer dmabuf: " > - << strerror(-ret) << std::endl; > - return nullptr; > - } > + auto iter = fb->planes_.find(fd); > + if (iter == fb->planes_.end()) { > + ret = drmPrimeFDToHandle(fd_, plane.fd.fd(), &handle); > + if (ret < 0) { > + ret = -errno; > + std::cerr > + << "Unable to import framebuffer dmabuf: " > + << strerror(-ret) << std::endl; > + return nullptr; > + } > > - fb->planes_.push_back({ handle }); > + fb->planes_[fd] = { handle }; > + } else { > + handle = iter->second.handle; > + } > > handles[i] = handle; > offsets[i] = plane.offset; > diff --git a/src/cam/drm.h b/src/cam/drm.h > index 00f7e798b771..0b88f9a33912 100644 > --- a/src/cam/drm.h > +++ b/src/cam/drm.h > @@ -242,7 +242,7 @@ private: > > FrameBuffer(Device *dev); > > - std::vector<Plane> planes_; > + std::map<int, Plane> planes_; > }; > > class AtomicRequest >
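A small sketch of why the deduplication matters, based on the behaviour described in the commit message; drmFd and dmabufFd are assumed to be valid descriptors:

#include <stdint.h>
#include <xf86drm.h>

static void demoDuplicateImport(int drmFd, int dmabufFd)
{
        uint32_t h1 = 0, h2 = 0;

        /* DRM detects the duplicate import and returns the same GEM handle. */
        drmPrimeFDToHandle(drmFd, dmabufFd, &h1);
        drmPrimeFDToHandle(drmFd, dmabufFd, &h2);

        /*
         * h1 == h2 here, so tearing down a frame buffer that recorded both
         * would close the same GEM object twice, and the second close fails.
         */
}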
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > Replace the manual implementation of frame buffer mapping with the Image > class to improve code sharing. The ViewFinder API is updated to take an > Image pointer in the render() function to prepare for multi-planar > buffer support. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > --- > src/qcam/main_window.cpp | 31 ++++++++----------------------- > src/qcam/main_window.h | 4 ++-- > src/qcam/meson.build | 1 + > src/qcam/viewfinder.h | 6 +++--- > src/qcam/viewfinder_gl.cpp | 7 ++++--- > src/qcam/viewfinder_gl.h | 2 +- > src/qcam/viewfinder_qt.cpp | 6 +++--- > src/qcam/viewfinder_qt.h | 2 +- > 8 files changed, 23 insertions(+), 36 deletions(-) > > diff --git a/src/qcam/main_window.cpp b/src/qcam/main_window.cpp > index 0a00b1001570..168dd5ce30e3 100644 > --- a/src/qcam/main_window.cpp > +++ b/src/qcam/main_window.cpp > @@ -7,10 +7,9 @@ > > #include "main_window.h" > > +#include <assert.h> > #include <iomanip> > #include <string> > -#include <sys/mman.h> > -#include <unistd.h> > > #include <QComboBox> > #include <QCoreApplication> > @@ -29,6 +28,7 @@ > #include <libcamera/camera_manager.h> > #include <libcamera/version.h> > > +#include "../cam/image.h" > #include "dng_writer.h" > #ifndef QT_NO_OPENGL > #include "viewfinder_gl.h" > @@ -473,15 +473,10 @@ int MainWindow::startCapture() > > for (const std::unique_ptr<FrameBuffer> &buffer : allocator_->buffers(stream)) { > /* Map memory buffers and cache the mappings. */ > - const FrameBuffer::Plane &plane = buffer->planes().front(); > - size_t length = lseek(plane.fd.fd(), 0, SEEK_END); > - void *memory = mmap(NULL, length, PROT_READ, MAP_SHARED, > - plane.fd.fd(), 0); > - > - mappedBuffers_[buffer.get()] = { static_cast<uint8_t *>(memory), > - plane.length }; > - planeData_[buffer.get()] = { static_cast<uint8_t *>(memory) + plane.offset, > - plane.length }; > + std::unique_ptr<Image> image = > + Image::fromFrameBuffer(buffer.get(), Image::MapMode::ReadOnly); > + assert(image != nullptr); > + mappedBuffers_[buffer.get()] = std::move(image); > > /* Store buffers on the free list. */ > freeBuffers_[stream].enqueue(buffer.get()); > @@ -543,12 +538,7 @@ error_disconnect: > error: > requests_.clear(); > > - for (auto &iter : mappedBuffers_) { > - const Span<uint8_t> &buffer = iter.second; > - munmap(buffer.data(), buffer.size()); > - } > mappedBuffers_.clear(); > - planeData_.clear(); > > freeBuffers_.clear(); > > @@ -580,12 +570,7 @@ void MainWindow::stopCapture() > > camera_->requestCompleted.disconnect(this); > > - for (auto &iter : mappedBuffers_) { > - const Span<uint8_t> &buffer = iter.second; > - munmap(buffer.data(), buffer.size()); > - } > mappedBuffers_.clear(); > - planeData_.clear(); > > requests_.clear(); > freeQueue_.clear(); > @@ -682,7 +667,7 @@ void MainWindow::processRaw(FrameBuffer *buffer, > "DNG Files (*.dng)"); > > if (!filename.isEmpty()) { > - uint8_t *memory = planeData_[buffer].data(); > + uint8_t *memory = mappedBuffers_[buffer]->data(0).data(); > DNGWriter::write(filename.toStdString().c_str(), camera_.get(), > rawStream_->configuration(), metadata, buffer, > memory); > @@ -766,7 +751,7 @@ void MainWindow::processViewfinder(FrameBuffer *buffer) > << "fps:" << Qt::fixed << qSetRealNumberPrecision(2) << fps; > > /* Render the frame on the viewfinder. 
*/ > - viewfinder_->render(buffer, planeData_[buffer]); > + viewfinder_->render(buffer, mappedBuffers_[buffer].get()); > } > > void MainWindow::queueRequest(FrameBuffer *buffer) > diff --git a/src/qcam/main_window.h b/src/qcam/main_window.h > index 28244bca58b2..a16bea09eadc 100644 > --- a/src/qcam/main_window.h > +++ b/src/qcam/main_window.h > @@ -34,6 +34,7 @@ using namespace libcamera; > class QAction; > class QComboBox; > > +class Image; > class HotplugEvent; > > enum { > @@ -106,8 +107,7 @@ private: > FrameBufferAllocator *allocator_; > > std::unique_ptr<CameraConfiguration> config_; > - std::map<FrameBuffer *, Span<uint8_t>> mappedBuffers_; > - std::map<FrameBuffer *, Span<uint8_t>> planeData_; > + std::map<FrameBuffer *, std::unique_ptr<Image>> mappedBuffers_; > > /* Capture state, buffers queue and statistics */ > bool isCapturing_; > diff --git a/src/qcam/meson.build b/src/qcam/meson.build > index 7d3621c93d41..c46f463130cd 100644 > --- a/src/qcam/meson.build > +++ b/src/qcam/meson.build > @@ -15,6 +15,7 @@ endif > qcam_enabled = true > > qcam_sources = files([ > + '../cam/image.cpp', > '../cam/options.cpp', > '../cam/stream_options.cpp', I never realized we were referencing as much "../cam" ! > 'format_converter.cpp', > diff --git a/src/qcam/viewfinder.h b/src/qcam/viewfinder.h > index 42d40f1f33f0..fb462835fb5f 100644 > --- a/src/qcam/viewfinder.h > +++ b/src/qcam/viewfinder.h > @@ -11,11 +11,11 @@ > #include <QList> > #include <QSize> > > -#include <libcamera/base/span.h> > - > #include <libcamera/formats.h> > #include <libcamera/framebuffer.h> > > +class Image; > + > class ViewFinder > { > public: > @@ -24,7 +24,7 @@ public: > virtual const QList<libcamera::PixelFormat> &nativeFormats() const = 0; > > virtual int setFormat(const libcamera::PixelFormat &format, const QSize &size) = 0; > - virtual void render(libcamera::FrameBuffer *buffer, libcamera::Span<uint8_t> mem) = 0; > + virtual void render(libcamera::FrameBuffer *buffer, Image *image) = 0; > virtual void stop() = 0; > > virtual QImage getCurrentImage() = 0; > diff --git a/src/qcam/viewfinder_gl.cpp b/src/qcam/viewfinder_gl.cpp > index d2ef036974f4..87e4fe03cb8d 100644 > --- a/src/qcam/viewfinder_gl.cpp > +++ b/src/qcam/viewfinder_gl.cpp > @@ -13,6 +13,8 @@ > > #include <libcamera/formats.h> > > +#include "../cam/image.h" Where should Image class be in the end, once stable enough ? 
Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > + > static const QList<libcamera::PixelFormat> supportedFormats{ > /* YUV - packed (single plane) */ > libcamera::formats::UYVY, > @@ -110,8 +112,7 @@ QImage ViewFinderGL::getCurrentImage() > return grabFramebuffer(); > } > > -void ViewFinderGL::render(libcamera::FrameBuffer *buffer, > - libcamera::Span<uint8_t> mem) > +void ViewFinderGL::render(libcamera::FrameBuffer *buffer, Image *image) > { > if (buffer->planes().size() != 1) { > qWarning() << "Multi-planar buffers are not supported"; > @@ -121,7 +122,7 @@ void ViewFinderGL::render(libcamera::FrameBuffer *buffer, > if (buffer_) > renderComplete(buffer_); > > - data_ = mem.data(); > + data_ = image->data(0).data(); > /* > * \todo Get the stride from the buffer instead of computing it naively > */ > diff --git a/src/qcam/viewfinder_gl.h b/src/qcam/viewfinder_gl.h > index 3334549e0be4..7cd8ef3316b9 100644 > --- a/src/qcam/viewfinder_gl.h > +++ b/src/qcam/viewfinder_gl.h > @@ -39,7 +39,7 @@ public: > const QList<libcamera::PixelFormat> &nativeFormats() const override; > > int setFormat(const libcamera::PixelFormat &format, const QSize &size) override; > - void render(libcamera::FrameBuffer *buffer, libcamera::Span<uint8_t> mem) override; > + void render(libcamera::FrameBuffer *buffer, Image *image) override; > void stop() override; > > QImage getCurrentImage() override; > diff --git a/src/qcam/viewfinder_qt.cpp b/src/qcam/viewfinder_qt.cpp > index a0bf99b0b522..fef6d53eef5e 100644 > --- a/src/qcam/viewfinder_qt.cpp > +++ b/src/qcam/viewfinder_qt.cpp > @@ -19,6 +19,7 @@ > > #include <libcamera/formats.h> > > +#include "../cam/image.h" > #include "format_converter.h" > > static const QMap<libcamera::PixelFormat, QImage::Format> nativeFormats > @@ -78,15 +79,14 @@ int ViewFinderQt::setFormat(const libcamera::PixelFormat &format, > return 0; > } > > -void ViewFinderQt::render(libcamera::FrameBuffer *buffer, > - libcamera::Span<uint8_t> mem) > +void ViewFinderQt::render(libcamera::FrameBuffer *buffer, Image *image) > { > if (buffer->planes().size() != 1) { > qWarning() << "Multi-planar buffers are not supported"; > return; > } > > - unsigned char *memory = mem.data(); > + unsigned char *memory = image->data(0).data(); > size_t size = buffer->metadata().planes()[0].bytesused; > > { > diff --git a/src/qcam/viewfinder_qt.h b/src/qcam/viewfinder_qt.h > index 1a569b9cee6e..6b48ef48a7d1 100644 > --- a/src/qcam/viewfinder_qt.h > +++ b/src/qcam/viewfinder_qt.h > @@ -32,7 +32,7 @@ public: > const QList<libcamera::PixelFormat> &nativeFormats() const override; > > int setFormat(const libcamera::PixelFormat &format, const QSize &size) override; > - void render(libcamera::FrameBuffer *buffer, libcamera::Span<uint8_t> mem) override; > + void render(libcamera::FrameBuffer *buffer, Image *image) override; > void stop() override; > > QImage getCurrentImage() override; >
Hi Laurent, On 06/09/2021 04:00, Laurent Pinchart wrote: > Now that the ViewFinderGL receives an Image, it can trivially support > multi-planar buffers. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > --- > src/qcam/viewfinder_gl.cpp | 38 +++++++++++++++++--------------------- > src/qcam/viewfinder_gl.h | 2 +- > 2 files changed, 18 insertions(+), 22 deletions(-) > > diff --git a/src/qcam/viewfinder_gl.cpp b/src/qcam/viewfinder_gl.cpp > index 87e4fe03cb8d..32232faa2ad8 100644 > --- a/src/qcam/viewfinder_gl.cpp > +++ b/src/qcam/viewfinder_gl.cpp > @@ -56,7 +56,7 @@ static const QList<libcamera::PixelFormat> supportedFormats{ > }; > > ViewFinderGL::ViewFinderGL(QWidget *parent) > - : QOpenGLWidget(parent), buffer_(nullptr), data_(nullptr), > + : QOpenGLWidget(parent), buffer_(nullptr), image_(nullptr), > vertexBuffer_(QOpenGLBuffer::VertexBuffer) > { > } > @@ -102,6 +102,7 @@ void ViewFinderGL::stop() > if (buffer_) { > renderComplete(buffer_); > buffer_ = nullptr; > + image_ = nullptr; > } > } > > @@ -114,15 +115,10 @@ QImage ViewFinderGL::getCurrentImage() > > void ViewFinderGL::render(libcamera::FrameBuffer *buffer, Image *image) > { > - if (buffer->planes().size() != 1) { > - qWarning() << "Multi-planar buffers are not supported"; > - return; > - } > - > if (buffer_) > renderComplete(buffer_); > > - data_ = image->data(0).data(); > + image_ = image; > /* > * \todo Get the stride from the buffer instead of computing it naively > */ > @@ -489,7 +485,7 @@ void ViewFinderGL::doRender() > 0, > GL_LUMINANCE, > GL_UNSIGNED_BYTE, > - data_); > + image_->data(0).data()); > shaderProgram_.setUniformValue(textureUniformY_, 0); > > /* Activate texture UV/VU */ > @@ -503,7 +499,7 @@ void ViewFinderGL::doRender() > 0, > GL_LUMINANCE_ALPHA, > GL_UNSIGNED_BYTE, > - data_ + size_.width() * size_.height()); > + image_->data(1).data()); > shaderProgram_.setUniformValue(textureUniformU_, 1); > break; > > @@ -519,7 +515,7 @@ void ViewFinderGL::doRender() > 0, > GL_LUMINANCE, > GL_UNSIGNED_BYTE, > - data_); > + image_->data(0).data()); > shaderProgram_.setUniformValue(textureUniformY_, 0); > > /* Activate texture U */ > @@ -533,7 +529,7 @@ void ViewFinderGL::doRender() > 0, > GL_LUMINANCE, > GL_UNSIGNED_BYTE, > - data_ + size_.width() * size_.height()); > + image_->data(1).data()); > shaderProgram_.setUniformValue(textureUniformU_, 1); > > /* Activate texture V */ > @@ -547,7 +543,7 @@ void ViewFinderGL::doRender() > 0, > GL_LUMINANCE, > GL_UNSIGNED_BYTE, > - data_ + size_.width() * size_.height() * 5 / 4); > + image_->data(2).data()); > shaderProgram_.setUniformValue(textureUniformV_, 2); > break; > > @@ -563,7 +559,7 @@ void ViewFinderGL::doRender() > 0, > GL_LUMINANCE, > GL_UNSIGNED_BYTE, > - data_); > + image_->data(0).data()); > shaderProgram_.setUniformValue(textureUniformY_, 0); > > /* Activate texture V */ > @@ -577,7 +573,7 @@ void ViewFinderGL::doRender() > 0, > GL_LUMINANCE, > GL_UNSIGNED_BYTE, > - data_ + size_.width() * size_.height()); > + image_->data(1).data()); > shaderProgram_.setUniformValue(textureUniformV_, 2); > > /* Activate texture U */ > @@ -591,7 +587,7 @@ void ViewFinderGL::doRender() > 0, > GL_LUMINANCE, > GL_UNSIGNED_BYTE, > - data_ + size_.width() * size_.height() * 5 / 4); > + image_->data(2).data()); > shaderProgram_.setUniformValue(textureUniformU_, 1); > break; > > @@ -602,7 +598,7 @@ void ViewFinderGL::doRender() > /* > * Packed YUV formats are stored in a RGBA texture to match 
the > * OpenGL texel size with the 4 bytes repeating pattern in YUV. > - * The texture width is thus half of the image with. > + * The texture width is thus half of the image_ with. > */ > glActiveTexture(GL_TEXTURE0); > configureTexture(*textures_[0]); > @@ -614,7 +610,7 @@ void ViewFinderGL::doRender() > 0, > GL_RGBA, > GL_UNSIGNED_BYTE, > - data_); > + image_->data(0).data()); > shaderProgram_.setUniformValue(textureUniformY_, 0); > > /* > @@ -642,7 +638,7 @@ void ViewFinderGL::doRender() > 0, > GL_RGBA, > GL_UNSIGNED_BYTE, > - data_); > + image_->data(0).data()); > shaderProgram_.setUniformValue(textureUniformY_, 0); > break; > > @@ -658,7 +654,7 @@ void ViewFinderGL::doRender() > 0, > GL_RGB, > GL_UNSIGNED_BYTE, > - data_); > + image_->data(0).data()); > shaderProgram_.setUniformValue(textureUniformY_, 0); > break; > > @@ -689,7 +685,7 @@ void ViewFinderGL::doRender() > 0, > GL_LUMINANCE, > GL_UNSIGNED_BYTE, > - data_); > + image_->data(0).data()); > shaderProgram_.setUniformValue(textureUniformY_, 0); > shaderProgram_.setUniformValue(textureUniformBayerFirstRed_, > firstRed_); > @@ -714,7 +710,7 @@ void ViewFinderGL::paintGL() > << "create fragment shader failed."; > } > > - if (data_) { > + if (image_) { > glClearColor(0.0, 0.0, 0.0, 1.0); > glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); > > diff --git a/src/qcam/viewfinder_gl.h b/src/qcam/viewfinder_gl.h > index 7cd8ef3316b9..72a60ecb9159 100644 > --- a/src/qcam/viewfinder_gl.h > +++ b/src/qcam/viewfinder_gl.h > @@ -67,7 +67,7 @@ private: > libcamera::PixelFormat format_; > QSize size_; > unsigned int stride_; > - unsigned char *data_; > + Image *image_; > > /* Shaders */ > QOpenGLShaderProgram shaderProgram_; >
Hi Laurent On 9/6/21 7:30 AM, Laurent Pinchart wrote: > Hello everybody, > > This patch series started as an investigation of a qcam failure with > IPU3 after the merge of the FrameBuffer offset support. While a hack in > qcam would be possible, I decided to instead address the core issue and > fix it in V4L2VideoDevice. > > Compared to v1, the series now includes fixes for cam and qcam in > addition to the changes needed in the libcamera core. They have been > tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > > The GStreamer element seems to work fine without any change required. > The V4L2 compatibility layer is still broken, and I haven't tested the > Android HAL yet (any volunteer ?). I am applied this series on top of master and tested on nautilus. Streaming seems fine to me, although it was appearing more choppy than usual. Now that I see, entire nautilus seems laggy to respond in general. One "Chrome" process hogging 50% CPU, seems like a transient issue. So, streaming seems fine on multiple runs, with multiple resolutions selections (from the camera-app menu), however, requesting MJPEG (shutter-click) results in a segfault. The stack trace is below: [0:24:11.415660787] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750235824 with 2 streams [0:24:11.415858538] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous [0:24:11.415905500] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) [0:24:11.415943620] [10635] DEBUG HAL camera_device.cpp:966 '\_SB_.PCI0.I2C2.CAM0': 1 - (4160x3104)[0x00000021] -> (4160x3104)[NV12] (mapped) [0:24:11.416461639] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 3 [0:24:11.428903141] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video0[37:cap]: Dequeuing buffer 2 [0:24:11.429095316] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature estimated: 6644 [0:24:11.429563781] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 193 started [0:24:11.429627478] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 193 [0:24:11.429683048] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video6[25:cap]: Queueing buffer 1 [0:24:11.436615191] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video5[27:out]: Queueing buffer 2 [0:24:11.436718629] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video8[28:cap]: Queueing buffer 2 [0:24:11.436799420] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video4[24:out]: Queueing buffer 2 [0:24:11.457388821] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 1 [0:24:11.457948159] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 0 [0:24:11.458257692] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video5[27:out]: Dequeuing buffer 2 [0:24:11.458525315] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 1 [0:24:11.458968848] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 [0:24:11.459005634] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB [0:24:11.459082715] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16864 and for blue: 1.14863 [0:24:11.459325998] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 194 [0:24:11.459562485] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 194 [0:24:11.459815347] [10629] DEBUG Request 
request.cpp:291 Request(129:C:0/1:140736750229136) [0:24:11.460390126] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750229136 completed with 1 streams [0:24:11.461855854] [10635] DEBUG Request request.cpp:94 Created request - cookie: 140736750230064 [0:24:11.461939858] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750230064 with 1 streams [0:24:11.462057435] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous [0:24:11.462098906] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) [0:24:11.463164722] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 194 started [0:24:11.463730946] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 193 [0:24:11.464033686] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 194 [0:24:11.464329869] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 0 [0:24:11.495739092] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 195 started [0:24:11.496489311] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 194 [0:24:11.496737385] [10629] DEBUG DelayedControls delayed_controls.cpp:285 Queue is empty, auto queue no-op. [0:24:11.497044311] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video0[37:cap]: Dequeuing buffer 3 [0:24:11.497335155] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature estimated: 6658 [0:24:11.497528845] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video6[25:cap]: Queueing buffer 2 [0:24:11.503589322] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video5[27:out]: Queueing buffer 3 [0:24:11.504027344] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video8[28:cap]: Queueing buffer 3 [0:24:11.504287330] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video4[24:out]: Queueing buffer 3 [0:24:11.504712501] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 2 [0:24:11.505005096] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 1 [0:24:11.505260331] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video5[27:out]: Dequeuing buffer 3 [0:24:11.505506837] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 2 [0:24:11.505940926] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 [0:24:11.505976974] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB [0:24:11.506057427] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16797 and for blue: 1.14978 [0:24:11.506297848] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 196 [0:24:11.506547021] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 196 [0:24:11.506796456] [10629] DEBUG Request request.cpp:291 Request(130:C:0/1:140736750225984) [0:24:11.507374522] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750225984 completed with 1 streams [0:24:11.509426987] [10635] DEBUG Request request.cpp:94 Created request - cookie: 140736750227664 [0:24:11.509507039] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750227664 with 1 streams [0:24:11.509630706] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous [0:24:11.509675200] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> 
(4160x3104)[NV12] (direct) [0:24:11.511309605] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 1 [0:24:11.528984601] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 196 started [0:24:11.529446048] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 196 [0:24:11.548431632] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 3 [0:24:11.548993830] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 2 [0:24:11.549320802] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 3 [0:24:11.549807736] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 [0:24:11.549846777] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB [0:24:11.549927350] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16841 and for blue: 1.14912 [0:24:11.550170924] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 197 [0:24:11.550405629] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 197 [0:24:11.550646460] [10629] DEBUG Request request.cpp:291 Request(131:C:0/1:140736750235824) [0:24:11.551245104] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750235824 completed with 2 streams [0:24:11.562307680] [10629] DEBUG EXIF exif.cpp:522 Created EXIF instance (536 bytes) [0:24:11.634800113] [10629] DEBUG JPEG encoder_libjpeg.cpp:220 JPEG Encode Starting:4160x3104 Thread 6 "CameraModuleThr" received signal SIGSEGV, Segmentation fault. [Switching to Thread 0x7fffe7fff640 (LWP 10629)] 0x00007ffff597ead5 in EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so (gdb) bt #0Â 0x00007ffff597ead5 in EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so #1Â 0x00007ffff597eda0 in EncoderLibJpeg::encode(libcamera::Span<unsigned char const, 18446744073709551615ul>, libcamera::Span<unsigned char, 18446744073709551615ul>, libcamera::Span<unsigned char const, 18446744073709551615ul>, unsigned int) () from /usr/lib64/camera_hal/libcamera-hal.so #2Â 0x00007ffff597ec53 in EncoderLibJpeg::encode(libcamera::FrameBuffer const&, libcamera::Span<unsigned char, 18446744073709551615ul>, libcamera::Span<unsigned char const, 18446744073709551615ul>, unsigned int) () from /usr/lib64/camera_hal/libcamera-hal.so #3Â 0x00007ffff5981871 in PostProcessorJpeg::process(libcamera::FrameBuffer const&, CameraBuffer*, CameraMetadata const&, CameraMetadata*) () Â Â from /usr/lib64/camera_hal/libcamera-hal.so #4Â 0x00007ffff597dd8d in CameraStream::process(libcamera::FrameBuffer const&, native_handle const*, CameraMetadata const&, CameraMetadata*) () Â Â from /usr/lib64/camera_hal/libcamera-hal.so #5Â 0x00007ffff59717b9 in CameraDevice::requestComplete(libcamera::Request*) () from /usr/lib64/camera_hal/libcamera-hal.so #6Â 0x00007ffff5979a7c in libcamera::BoundMethodMember<CameraDevice, void, libcamera::Request*>::activate(libcamera::Request*, bool) () Â Â from /usr/lib64/camera_hal/libcamera-hal.so #7Â 0x00007ffff58f9051 in libcamera::Signal<libcamera::IPCMessage const&>::emit(libcamera::IPCMessage const&) () from /usr/lib64/libcamera.so #8Â 0x00007ffff58f8fe8 in libcamera::Camera::requestComplete(libcamera::Request*) () from /usr/lib64/libcamera.so #9Â 0x00007ffff591f28a in 
libcamera::PipelineHandler::completeRequest(libcamera::Request*) () from /usr/lib64/libcamera.so #10 0x00007ffff5935cef in libcamera::IPU3CameraData::queueFrameAction(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so #11 0x00007ffff58f0b02 in libcamera::BoundMethodMember<libcamera::ipa::ipu3::IPAProxyIPU3, void, unsigned int, libcamera::ipa::ipu3::IPU3Action const&>::activate(unsigned int, libcamera::ipa::ipu3::IPU3Action const&, bool) () from /usr/lib64/libcamera.so #12 0x00007ffff58edf81 in libcamera::Signal<unsigned int, libcamera::ipa::ipu3::IPU3Action const&>::emit(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () Â Â from /usr/lib64/libcamera.so #13 0x00007ffff58eb5b4 in libcamera::ipa::ipu3::IPAProxyIPU3::queueFrameActionThread(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () Â Â from /usr/lib64/libcamera.so #14 0x00007ffff587caed in libcamera::Object::message(libcamera::Message*) () from /usr/lib64/libcamera-base.so #15 0x00007ffff587e00b in libcamera::Thread::dispatchMessages(libcamera::Message::Type) () from /usr/lib64/libcamera-base.so #16 0x00007ffff5876925 in libcamera::EventDispatcherPoll::processEvents() () from /usr/lib64/libcamera-base.so #17 0x00007ffff587d6e2 in libcamera::Thread::exec() () from /usr/lib64/libcamera-base.so #18 0x00007ffff58fb582 in libcamera::CameraManager::Private::run() () from /usr/lib64/libcamera.so #19 0x00007ffff587e43c in ?? () from /usr/lib64/libcamera-base.so #20 0x0000000000000000 in ?? () > > The most important change is in patches 13/27 and 14/27, which translate > between V4L2 buffers and libcamera FrameBuffer to handle the case where > a multi-planar frame buffer is used with the V4L2 single-planar API. > It's working more or less by chance at the moment (except in qcam where > it's broken, and possibly in other places I haven't tested). Patches > 01/27 to 12/27 are cleanups and additions to prepare for the work in > V4L2VideoDevice, and patch 15/27 is a small cleanup on top. Patches > 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. > > Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an > issue in the Android HAL. Worth being noted is patch 19/27 that > introduces an Image class shared by cam and qcam. The class duplicates > the MappedFrameBuffer implementation private to libcamera. I've tried to > rework MappedFrameBuffer into something I would be happy to see in the > public API, but failed to do so in a reasonable amount of time, and I > didn't want to delay this important regression fix. > > This series doesn't break any unit test, as vimc doesn't support NV12. > Addition of NV12 support to the vimc kernel driver would be very nice, > in order to test multi-planar support in our unit tests. 
Volunteers are > welcome ;-) > > Laurent Pinchart (27): > libcamera: base: utils: Use size_t for index in utils::enumerate() > libcamera: file_descriptor: Add a function to retrieve the inode > libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() > libcamera: Use V4L2PixelFormat::fromPixelFormat() > libcamera: formats: Move plane info structure to PixelFormatInfo > libcamera: formats: Add planeSize() helpers to PixelFormatInfo > libcamera: formats: Support V4L2 non-contiguous formats > libcamera: framebuffer: Move planes check to constructor > libcamera: framebuffer: Add a function to check if planes are > contiguous > libcamera: v4l2_videodevice: Cache PixelFormatInfo > libcamera: v4l2_videodevice: Document plane handling in createBuffer() > libcamera: v4l2_videodevice: Take stride into account to compute > offsets > libcamera: v4l2_videodevice: Coalesce planes when queuing buffer > libcamera: v4l2_videodevice: Split planes when dequeuing buffer > libcamera: v4l2_videodevice: Use utils::enumerate() > libcamera: framebuffer: Allocate metadata planes at construction time > libcamera: framebuffer: Prevent modifying the number of metadata > planes > android: camera_device: Don't assume all planes use the same fd > cam: Add Image class > cam: file_sink: Use Image class to access pixel data > cam: drm: Support per-plane stride values > cam: drm: Set per-plane offsets when creating DRM frame buffer > cam: drm: Avoid importing the same dmabuf multiple times > qcam: Print bytesused for all planes > qcam: Use Image class to access pixel data > qcam: viewfinder_gl: Support multi-planar buffers > qcam: viewfinder_qt: Support multi-planar buffers > > include/libcamera/base/utils.h | 4 +- > include/libcamera/file_descriptor.h | 3 + > include/libcamera/framebuffer.h | 19 +- > include/libcamera/internal/formats.h | 22 +- > include/libcamera/internal/framebuffer.h | 2 + > include/libcamera/internal/v4l2_pixelformat.h | 2 +- > include/libcamera/internal/v4l2_videodevice.h | 3 +- > src/android/camera_device.cpp | 25 +- > src/android/mm/generic_camera_buffer.cpp | 11 +- > src/android/yuv/post_processor_yuv.cpp | 10 +- > src/cam/camera_session.cpp | 4 +- > src/cam/drm.cpp | 38 +- > src/cam/drm.h | 7 +- > src/cam/file_sink.cpp | 44 +-- > src/cam/file_sink.h | 6 +- > src/cam/image.cpp | 107 +++++ > src/cam/image.h | 52 +++ > src/cam/kms_sink.cpp | 28 +- > src/cam/meson.build | 1 + > src/libcamera/file_descriptor.cpp | 26 ++ > src/libcamera/formats.cpp | 373 ++++++++++++++---- > src/libcamera/framebuffer.cpp | 57 ++- > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- > src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- > .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- > src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- > src/libcamera/pipeline/simple/converter.cpp | 8 +- > src/libcamera/pipeline/simple/simple.cpp | 4 +- > src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- > src/libcamera/pipeline/vimc/vimc.cpp | 8 +- > src/libcamera/v4l2_pixelformat.cpp | 11 +- > src/libcamera/v4l2_videodevice.cpp | 196 ++++++--- > src/qcam/format_converter.cpp | 18 +- > src/qcam/format_converter.h | 9 +- > src/qcam/main_window.cpp | 38 +- > src/qcam/main_window.h | 4 +- > src/qcam/meson.build | 1 + > src/qcam/viewfinder.h | 6 +- > src/qcam/viewfinder_gl.cpp | 45 +-- > src/qcam/viewfinder_gl.h | 4 +- > src/qcam/viewfinder_qt.cpp | 20 +- > src/qcam/viewfinder_qt.h | 2 +- > src/v4l2/v4l2_camera_proxy.cpp | 11 +- > test/libtest/buffer_source.cpp | 3 +- > test/utils.cpp | 10 +- > 45 files changed, 911 insertions(+), 357 deletions(-) > 
create mode 100644 src/cam/image.cpp > create mode 100644 src/cam/image.h >
On 06/09/2021 11:08, Umang Jain wrote: > Hi Laurent > > On 9/6/21 7:30 AM, Laurent Pinchart wrote: >> Hello everybody, >> >> This patch series started as an investigation of a qcam failure with >> IPU3 after the merge of the FrameBuffer offset support. While a hack in >> qcam would be possible, I decided to instead address the core issue and >> fix it in V4L2VideoDevice. >> >> Compared to v1, the series now includes fixes for cam and qcam in >> addition to the changes needed in the libcamera core. They have been >> tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. >> >> The GStreamer element seems to work fine without any change required. >> The V4L2 compatibility layer is still broken, and I haven't tested the >> Android HAL yet (any volunteer ?). > > > I am applied this series on top of master and tested on nautilus. > Streaming seems fine to me, although it was appearing more choppy than > usual. > > Now that I see, entire nautilus seems laggy to respond in general. One > "Chrome" process hogging 50% CPU, seems like a transient issue. > > So, streaming seems fine on multiple runs, with multiple resolutions > selections (from the camera-app menu), however, requesting MJPEG > (shutter-click) results in a segfault. > > The stack trace is below: > > [0:24:11.415660787] [10635] DEBUG HAL camera_device.cpp:941 > '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750235824 with 2 streams > [0:24:11.415858538] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is > not contiguous > [0:24:11.415905500] [10635] DEBUG HAL camera_device.cpp:980 > '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] > (direct) > [0:24:11.415943620] [10635] DEBUG HAL camera_device.cpp:966 > '\_SB_.PCI0.I2C2.CAM0': 1 - (4160x3104)[0x00000021] -> (4160x3104)[NV12] > (mapped) > [0:24:11.416461639] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video0[37:cap]: Queueing buffer 3 > [0:24:11.428903141] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video0[37:cap]: Dequeuing buffer 2 > [0:24:11.429095316] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature > estimated: 6644 > [0:24:11.429563781] [10629] DEBUG DelayedControls > delayed_controls.cpp:237 frame 193 started > [0:24:11.429627478] [10629] DEBUG DelayedControls > delayed_controls.cpp:272 Setting Exposure to 269 at index 193 > [0:24:11.429683048] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video6[25:cap]: Queueing buffer 1 > [0:24:11.436615191] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video5[27:out]: Queueing buffer 2 > [0:24:11.436718629] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video8[28:cap]: Queueing buffer 2 > [0:24:11.436799420] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video4[24:out]: Queueing buffer 2 > [0:24:11.457388821] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video4[24:out]: Dequeuing buffer 1 > [0:24:11.457948159] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video6[25:cap]: Dequeuing buffer 0 > [0:24:11.458257692] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video5[27:out]: Dequeuing buffer 2 > [0:24:11.458525315] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video8[28:cap]: Dequeuing buffer 1 > [0:24:11.458968848] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > [0:24:11.459005634] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > [0:24:11.459082715] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for > red: 1.16864 and for blue: 1.14863 > [0:24:11.459325998] [10629] DEBUG DelayedControls > delayed_controls.cpp:179 Queuing Exposure to 269 at 
index 194 > [0:24:11.459562485] [10629] DEBUG DelayedControls > delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 194 > [0:24:11.459815347] [10629] DEBUG Request request.cpp:291 > Request(129:C:0/1:140736750229136) > [0:24:11.460390126] [10629] DEBUG HAL camera_device.cpp:1117 > '\_SB_.PCI0.I2C2.CAM0': Request 140736750229136 completed with 1 streams > [0:24:11.461855854] [10635] DEBUG Request request.cpp:94 Created request > - cookie: 140736750230064 > [0:24:11.461939858] [10635] DEBUG HAL camera_device.cpp:941 > '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750230064 with 1 streams > [0:24:11.462057435] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is > not contiguous > [0:24:11.462098906] [10635] DEBUG HAL camera_device.cpp:980 > '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] > (direct) > [0:24:11.463164722] [10629] DEBUG DelayedControls > delayed_controls.cpp:237 frame 194 started > [0:24:11.463730946] [10629] DEBUG DelayedControls > delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 193 > [0:24:11.464033686] [10629] DEBUG DelayedControls > delayed_controls.cpp:272 Setting Exposure to 269 at index 194 > [0:24:11.464329869] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video0[37:cap]: Queueing buffer 0 > [0:24:11.495739092] [10629] DEBUG DelayedControls > delayed_controls.cpp:237 frame 195 started > [0:24:11.496489311] [10629] DEBUG DelayedControls > delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 194 > [0:24:11.496737385] [10629] DEBUG DelayedControls > delayed_controls.cpp:285 Queue is empty, auto queue no-op. > [0:24:11.497044311] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video0[37:cap]: Dequeuing buffer 3 > [0:24:11.497335155] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature > estimated: 6658 > [0:24:11.497528845] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video6[25:cap]: Queueing buffer 2 > [0:24:11.503589322] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video5[27:out]: Queueing buffer 3 > [0:24:11.504027344] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video8[28:cap]: Queueing buffer 3 > [0:24:11.504287330] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video4[24:out]: Queueing buffer 3 > [0:24:11.504712501] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video4[24:out]: Dequeuing buffer 2 > [0:24:11.505005096] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video6[25:cap]: Dequeuing buffer 1 > [0:24:11.505260331] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video5[27:out]: Dequeuing buffer 3 > [0:24:11.505506837] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video8[28:cap]: Dequeuing buffer 2 > [0:24:11.505940926] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > [0:24:11.505976974] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > [0:24:11.506057427] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for > red: 1.16797 and for blue: 1.14978 > [0:24:11.506297848] [10629] DEBUG DelayedControls > delayed_controls.cpp:179 Queuing Exposure to 269 at index 196 > [0:24:11.506547021] [10629] DEBUG DelayedControls > delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 196 > [0:24:11.506796456] [10629] DEBUG Request request.cpp:291 > Request(130:C:0/1:140736750225984) > [0:24:11.507374522] [10629] DEBUG HAL camera_device.cpp:1117 > '\_SB_.PCI0.I2C2.CAM0': Request 140736750225984 completed with 1 streams > [0:24:11.509426987] [10635] DEBUG Request request.cpp:94 Created request > - cookie: 140736750227664 > [0:24:11.509507039] [10635] DEBUG HAL 
camera_device.cpp:941 > '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750227664 with 1 streams > [0:24:11.509630706] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is > not contiguous > [0:24:11.509675200] [10635] DEBUG HAL camera_device.cpp:980 > '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] > (direct) > [0:24:11.511309605] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > /dev/video0[37:cap]: Queueing buffer 1 > [0:24:11.528984601] [10629] DEBUG DelayedControls > delayed_controls.cpp:237 frame 196 started > [0:24:11.529446048] [10629] DEBUG DelayedControls > delayed_controls.cpp:272 Setting Exposure to 269 at index 196 > [0:24:11.548431632] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video4[24:out]: Dequeuing buffer 3 > [0:24:11.548993830] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video6[25:cap]: Dequeuing buffer 2 > [0:24:11.549320802] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > /dev/video8[28:cap]: Dequeuing buffer 3 > [0:24:11.549807736] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > [0:24:11.549846777] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > [0:24:11.549927350] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for > red: 1.16841 and for blue: 1.14912 > [0:24:11.550170924] [10629] DEBUG DelayedControls > delayed_controls.cpp:179 Queuing Exposure to 269 at index 197 > [0:24:11.550405629] [10629] DEBUG DelayedControls > delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 197 > [0:24:11.550646460] [10629] DEBUG Request request.cpp:291 > Request(131:C:0/1:140736750235824) > [0:24:11.551245104] [10629] DEBUG HAL camera_device.cpp:1117 > '\_SB_.PCI0.I2C2.CAM0': Request 140736750235824 completed with 2 streams > [0:24:11.562307680] [10629] DEBUG EXIF exif.cpp:522 Created EXIF > instance (536 bytes) > [0:24:11.634800113] [10629] DEBUG JPEG encoder_libjpeg.cpp:220 JPEG > Encode Starting:4160x3104 > > Thread 6 "CameraModuleThr" received signal SIGSEGV, Segmentation fault. > [Switching to Thread 0x7fffe7fff640 (LWP 10629)] > 0x00007ffff597ead5 in > EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, > 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so > (gdb) bt > #0Â 0x00007ffff597ead5 in > EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, > 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so > #1Â 0x00007ffff597eda0 in > EncoderLibJpeg::encode(libcamera::Span<unsigned char const, > 18446744073709551615ul>, libcamera::Span<unsigned char, > 18446744073709551615ul>, libcamera::Span<unsigned char const, > 18446744073709551615ul>, unsigned int) () from > /usr/lib64/camera_hal/libcamera-hal.so > #2Â 0x00007ffff597ec53 in EncoderLibJpeg::encode(libcamera::FrameBuffer > const&, libcamera::Span<unsigned char, 18446744073709551615ul>, 18446744073709551615ul is -1, so we've certainly missed capturing/preventing an error code from getting assigned to the span at some point, where it then got stored as an unsigned long. Does this happen repeatably? or only on some specific event? -- Kieran
On 9/6/21 4:13 PM, Kieran Bingham wrote: > On 06/09/2021 11:08, Umang Jain wrote: >> Hi Laurent >> >> On 9/6/21 7:30 AM, Laurent Pinchart wrote: >>> Hello everybody, >>> >>> This patch series started as an investigation of a qcam failure with >>> IPU3 after the merge of the FrameBuffer offset support. While a hack in >>> qcam would be possible, I decided to instead address the core issue and >>> fix it in V4L2VideoDevice. >>> >>> Compared to v1, the series now includes fixes for cam and qcam in >>> addition to the changes needed in the libcamera core. They have been >>> tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. >>> >>> The GStreamer element seems to work fine without any change required. >>> The V4L2 compatibility layer is still broken, and I haven't tested the >>> Android HAL yet (any volunteer ?). >> >> I am applied this series on top of master and tested on nautilus. >> Streaming seems fine to me, although it was appearing more choppy than >> usual. >> >> Now that I see, entire nautilus seems laggy to respond in general. One >> "Chrome" process hogging 50% CPU, seems like a transient issue. >> >> So, streaming seems fine on multiple runs, with multiple resolutions >> selections (from the camera-app menu), however, requesting MJPEG >> (shutter-click) results in a segfault. >> >> The stack trace is below: >> >> [0:24:11.415660787] [10635] DEBUG HAL camera_device.cpp:941 >> '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750235824 with 2 streams >> [0:24:11.415858538] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is >> not contiguous >> [0:24:11.415905500] [10635] DEBUG HAL camera_device.cpp:980 >> '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] >> (direct) >> [0:24:11.415943620] [10635] DEBUG HAL camera_device.cpp:966 >> '\_SB_.PCI0.I2C2.CAM0': 1 - (4160x3104)[0x00000021] -> (4160x3104)[NV12] >> (mapped) >> [0:24:11.416461639] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video0[37:cap]: Queueing buffer 3 >> [0:24:11.428903141] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video0[37:cap]: Dequeuing buffer 2 >> [0:24:11.429095316] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature >> estimated: 6644 >> [0:24:11.429563781] [10629] DEBUG DelayedControls >> delayed_controls.cpp:237 frame 193 started >> [0:24:11.429627478] [10629] DEBUG DelayedControls >> delayed_controls.cpp:272 Setting Exposure to 269 at index 193 >> [0:24:11.429683048] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video6[25:cap]: Queueing buffer 1 >> [0:24:11.436615191] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video5[27:out]: Queueing buffer 2 >> [0:24:11.436718629] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video8[28:cap]: Queueing buffer 2 >> [0:24:11.436799420] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video4[24:out]: Queueing buffer 2 >> [0:24:11.457388821] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video4[24:out]: Dequeuing buffer 1 >> [0:24:11.457948159] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video6[25:cap]: Dequeuing buffer 0 >> [0:24:11.458257692] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video5[27:out]: Dequeuing buffer 2 >> [0:24:11.458525315] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video8[28:cap]: Dequeuing buffer 1 >> [0:24:11.458968848] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 >> [0:24:11.459005634] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB >> [0:24:11.459082715] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for >> red: 1.16864 and for blue: 
1.14863 >> [0:24:11.459325998] [10629] DEBUG DelayedControls >> delayed_controls.cpp:179 Queuing Exposure to 269 at index 194 >> [0:24:11.459562485] [10629] DEBUG DelayedControls >> delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 194 >> [0:24:11.459815347] [10629] DEBUG Request request.cpp:291 >> Request(129:C:0/1:140736750229136) >> [0:24:11.460390126] [10629] DEBUG HAL camera_device.cpp:1117 >> '\_SB_.PCI0.I2C2.CAM0': Request 140736750229136 completed with 1 streams >> [0:24:11.461855854] [10635] DEBUG Request request.cpp:94 Created request >> - cookie: 140736750230064 >> [0:24:11.461939858] [10635] DEBUG HAL camera_device.cpp:941 >> '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750230064 with 1 streams >> [0:24:11.462057435] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is >> not contiguous >> [0:24:11.462098906] [10635] DEBUG HAL camera_device.cpp:980 >> '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] >> (direct) >> [0:24:11.463164722] [10629] DEBUG DelayedControls >> delayed_controls.cpp:237 frame 194 started >> [0:24:11.463730946] [10629] DEBUG DelayedControls >> delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 193 >> [0:24:11.464033686] [10629] DEBUG DelayedControls >> delayed_controls.cpp:272 Setting Exposure to 269 at index 194 >> [0:24:11.464329869] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video0[37:cap]: Queueing buffer 0 >> [0:24:11.495739092] [10629] DEBUG DelayedControls >> delayed_controls.cpp:237 frame 195 started >> [0:24:11.496489311] [10629] DEBUG DelayedControls >> delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 194 >> [0:24:11.496737385] [10629] DEBUG DelayedControls >> delayed_controls.cpp:285 Queue is empty, auto queue no-op. >> [0:24:11.497044311] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video0[37:cap]: Dequeuing buffer 3 >> [0:24:11.497335155] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature >> estimated: 6658 >> [0:24:11.497528845] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video6[25:cap]: Queueing buffer 2 >> [0:24:11.503589322] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video5[27:out]: Queueing buffer 3 >> [0:24:11.504027344] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video8[28:cap]: Queueing buffer 3 >> [0:24:11.504287330] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video4[24:out]: Queueing buffer 3 >> [0:24:11.504712501] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video4[24:out]: Dequeuing buffer 2 >> [0:24:11.505005096] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video6[25:cap]: Dequeuing buffer 1 >> [0:24:11.505260331] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video5[27:out]: Dequeuing buffer 3 >> [0:24:11.505506837] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video8[28:cap]: Dequeuing buffer 2 >> [0:24:11.505940926] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 >> [0:24:11.505976974] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB >> [0:24:11.506057427] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for >> red: 1.16797 and for blue: 1.14978 >> [0:24:11.506297848] [10629] DEBUG DelayedControls >> delayed_controls.cpp:179 Queuing Exposure to 269 at index 196 >> [0:24:11.506547021] [10629] DEBUG DelayedControls >> delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 196 >> [0:24:11.506796456] [10629] DEBUG Request request.cpp:291 >> Request(130:C:0/1:140736750225984) >> [0:24:11.507374522] [10629] DEBUG HAL camera_device.cpp:1117 >> '\_SB_.PCI0.I2C2.CAM0': Request 
140736750225984 completed with 1 streams >> [0:24:11.509426987] [10635] DEBUG Request request.cpp:94 Created request >> - cookie: 140736750227664 >> [0:24:11.509507039] [10635] DEBUG HAL camera_device.cpp:941 >> '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750227664 with 1 streams >> [0:24:11.509630706] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is >> not contiguous >> [0:24:11.509675200] [10635] DEBUG HAL camera_device.cpp:980 >> '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] >> (direct) >> [0:24:11.511309605] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 >> /dev/video0[37:cap]: Queueing buffer 1 >> [0:24:11.528984601] [10629] DEBUG DelayedControls >> delayed_controls.cpp:237 frame 196 started >> [0:24:11.529446048] [10629] DEBUG DelayedControls >> delayed_controls.cpp:272 Setting Exposure to 269 at index 196 >> [0:24:11.548431632] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video4[24:out]: Dequeuing buffer 3 >> [0:24:11.548993830] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video6[25:cap]: Dequeuing buffer 2 >> [0:24:11.549320802] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 >> /dev/video8[28:cap]: Dequeuing buffer 3 >> [0:24:11.549807736] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 >> [0:24:11.549846777] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB >> [0:24:11.549927350] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for >> red: 1.16841 and for blue: 1.14912 >> [0:24:11.550170924] [10629] DEBUG DelayedControls >> delayed_controls.cpp:179 Queuing Exposure to 269 at index 197 >> [0:24:11.550405629] [10629] DEBUG DelayedControls >> delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 197 >> [0:24:11.550646460] [10629] DEBUG Request request.cpp:291 >> Request(131:C:0/1:140736750235824) >> [0:24:11.551245104] [10629] DEBUG HAL camera_device.cpp:1117 >> '\_SB_.PCI0.I2C2.CAM0': Request 140736750235824 completed with 2 streams >> [0:24:11.562307680] [10629] DEBUG EXIF exif.cpp:522 Created EXIF >> instance (536 bytes) >> [0:24:11.634800113] [10629] DEBUG JPEG encoder_libjpeg.cpp:220 JPEG >> Encode Starting:4160x3104 >> >> Thread 6 "CameraModuleThr" received signal SIGSEGV, Segmentation fault. >> [Switching to Thread 0x7fffe7fff640 (LWP 10629)] >> 0x00007ffff597ead5 in >> EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, >> 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so >> (gdb) bt >> #0Â 0x00007ffff597ead5 in >> EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, >> 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so >> #1Â 0x00007ffff597eda0 in >> EncoderLibJpeg::encode(libcamera::Span<unsigned char const, >> 18446744073709551615ul>, libcamera::Span<unsigned char, >> 18446744073709551615ul>, libcamera::Span<unsigned char const, >> 18446744073709551615ul>, unsigned int) () from >> /usr/lib64/camera_hal/libcamera-hal.so >> #2Â 0x00007ffff597ec53 in EncoderLibJpeg::encode(libcamera::FrameBuffer >> const&, libcamera::Span<unsigned char, 18446744073709551615ul>, > 18446744073709551615ul is -1, so we've certainly missed > capturing/preventing an error code from getting assigned to the span at > some point, where it then got stored as an unsigned long. > > > Does this happen repeatably? or only on some specific event? It happens repeatably on every run of camera-service when you try to capture the frame. The service will segfault on shutter-click and you need to re-run it again. > -- > Kieran
Hi Laurent, everyone Thanks for all the efforts to get this working! I had just a little question or two... 1. Is it easy to tell if a FrameBuffer is actually single plane or multi plane? If not, could we add a public API function that would tell us? 2. Is it easy to get the full size of the buffer for the single plane case (rather than having to add all the bits up)? And again, if the answer is no, could we add such a thing? I'm thinking of trying to make life easy for applications that might want to pass these buffers to codecs where the driver might only support single planes. Not thinking of any platform in particular... :) Thanks! David On Mon, 6 Sept 2021 at 03:01, Laurent Pinchart <laurent.pinchart@ideasonboard.com> wrote: > > Hello everybody, > > This patch series started as an investigation of a qcam failure with > IPU3 after the merge of the FrameBuffer offset support. While a hack in > qcam would be possible, I decided to instead address the core issue and > fix it in V4L2VideoDevice. > > Compared to v1, the series now includes fixes for cam and qcam in > addition to the changes needed in the libcamera core. They have been > tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > > The GStreamer element seems to work fine without any change required. > The V4L2 compatibility layer is still broken, and I haven't tested the > Android HAL yet (any volunteer ?). > > The most important change is in patches 13/27 and 14/27, which translate > between V4L2 buffers and libcamera FrameBuffer to handle the case where > a multi-planar frame buffer is used with the V4L2 single-planar API. > It's working more or less by chance at the moment (except in qcam where > it's broken, and possibly in other places I haven't tested). Patches > 01/27 to 12/27 are cleanups and additions to prepare for the work in > V4L2VideoDevice, and patch 15/27 is a small cleanup on top. Patches > 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. > > Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an > issue in the Android HAL. Worth being noted is patch 19/27 that > introduces an Image class shared by cam and qcam. The class duplicates > the MappedFrameBuffer implementation private to libcamera. I've tried to > rework MappedFrameBuffer into something I would be happy to see in the > public API, but failed to do so in a reasonable amount of time, and I > didn't want to delay this important regression fix. > > This series doesn't break any unit test, as vimc doesn't support NV12. > Addition of NV12 support to the vimc kernel driver would be very nice, > in order to test multi-planar support in our unit tests. 
Volunteers are > welcome ;-) > > Laurent Pinchart (27): > libcamera: base: utils: Use size_t for index in utils::enumerate() > libcamera: file_descriptor: Add a function to retrieve the inode > libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() > libcamera: Use V4L2PixelFormat::fromPixelFormat() > libcamera: formats: Move plane info structure to PixelFormatInfo > libcamera: formats: Add planeSize() helpers to PixelFormatInfo > libcamera: formats: Support V4L2 non-contiguous formats > libcamera: framebuffer: Move planes check to constructor > libcamera: framebuffer: Add a function to check if planes are > contiguous > libcamera: v4l2_videodevice: Cache PixelFormatInfo > libcamera: v4l2_videodevice: Document plane handling in createBuffer() > libcamera: v4l2_videodevice: Take stride into account to compute > offsets > libcamera: v4l2_videodevice: Coalesce planes when queuing buffer > libcamera: v4l2_videodevice: Split planes when dequeuing buffer > libcamera: v4l2_videodevice: Use utils::enumerate() > libcamera: framebuffer: Allocate metadata planes at construction time > libcamera: framebuffer: Prevent modifying the number of metadata > planes > android: camera_device: Don't assume all planes use the same fd > cam: Add Image class > cam: file_sink: Use Image class to access pixel data > cam: drm: Support per-plane stride values > cam: drm: Set per-plane offsets when creating DRM frame buffer > cam: drm: Avoid importing the same dmabuf multiple times > qcam: Print bytesused for all planes > qcam: Use Image class to access pixel data > qcam: viewfinder_gl: Support multi-planar buffers > qcam: viewfinder_qt: Support multi-planar buffers > > include/libcamera/base/utils.h | 4 +- > include/libcamera/file_descriptor.h | 3 + > include/libcamera/framebuffer.h | 19 +- > include/libcamera/internal/formats.h | 22 +- > include/libcamera/internal/framebuffer.h | 2 + > include/libcamera/internal/v4l2_pixelformat.h | 2 +- > include/libcamera/internal/v4l2_videodevice.h | 3 +- > src/android/camera_device.cpp | 25 +- > src/android/mm/generic_camera_buffer.cpp | 11 +- > src/android/yuv/post_processor_yuv.cpp | 10 +- > src/cam/camera_session.cpp | 4 +- > src/cam/drm.cpp | 38 +- > src/cam/drm.h | 7 +- > src/cam/file_sink.cpp | 44 +-- > src/cam/file_sink.h | 6 +- > src/cam/image.cpp | 107 +++++ > src/cam/image.h | 52 +++ > src/cam/kms_sink.cpp | 28 +- > src/cam/meson.build | 1 + > src/libcamera/file_descriptor.cpp | 26 ++ > src/libcamera/formats.cpp | 373 ++++++++++++++---- > src/libcamera/framebuffer.cpp | 57 ++- > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- > src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- > .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- > src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- > src/libcamera/pipeline/simple/converter.cpp | 8 +- > src/libcamera/pipeline/simple/simple.cpp | 4 +- > src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- > src/libcamera/pipeline/vimc/vimc.cpp | 8 +- > src/libcamera/v4l2_pixelformat.cpp | 11 +- > src/libcamera/v4l2_videodevice.cpp | 196 ++++++--- > src/qcam/format_converter.cpp | 18 +- > src/qcam/format_converter.h | 9 +- > src/qcam/main_window.cpp | 38 +- > src/qcam/main_window.h | 4 +- > src/qcam/meson.build | 1 + > src/qcam/viewfinder.h | 6 +- > src/qcam/viewfinder_gl.cpp | 45 +-- > src/qcam/viewfinder_gl.h | 4 +- > src/qcam/viewfinder_qt.cpp | 20 +- > src/qcam/viewfinder_qt.h | 2 +- > src/v4l2/v4l2_camera_proxy.cpp | 11 +- > test/libtest/buffer_source.cpp | 3 +- > test/utils.cpp | 10 +- > 45 files changed, 911 insertions(+), 357 deletions(-) > 
create mode 100644 src/cam/image.cpp > create mode 100644 src/cam/image.h > > -- > Regards, > > Laurent Pinchart >
Hi Laurent, On Mon, Sep 6, 2021 at 6:04 PM <paul.elder@ideasonboard.com> wrote: > > Hi Laurent, > > On Mon, Sep 06, 2021 at 05:00:35AM +0300, Laurent Pinchart wrote: > > The inode is useful to check if two file descriptors refer to the same > > file. Add a function to retrieve it. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com> > > Reviewed-by: Paul Elder <paul.elder@ideasonboard.com> > > > --- > > Changes since v1: > > > > - Use isValid() instead of open-coding it > > - Print a message on error > > --- > > include/libcamera/file_descriptor.h | 3 +++ > > src/libcamera/file_descriptor.cpp | 26 ++++++++++++++++++++++++++ > > 2 files changed, 29 insertions(+) > > > > diff --git a/include/libcamera/file_descriptor.h b/include/libcamera/file_descriptor.h > > index d514aac7697b..988f9b7a3d25 100644 > > --- a/include/libcamera/file_descriptor.h > > +++ b/include/libcamera/file_descriptor.h > > @@ -8,6 +8,7 @@ > > #define __LIBCAMERA_FILE_DESCRIPTOR_H__ > > > > #include <memory> > > +#include <sys/types.h> > > > > namespace libcamera { > > > > @@ -27,6 +28,8 @@ public: > > int fd() const { return fd_ ? fd_->fd() : -1; } > > FileDescriptor dup() const; > > > > + ino_t inode() const; > > + > > private: > > class Descriptor > > { > > diff --git a/src/libcamera/file_descriptor.cpp b/src/libcamera/file_descriptor.cpp > > index 9f9ebc81f738..0409c3e1758c 100644 > > --- a/src/libcamera/file_descriptor.cpp > > +++ b/src/libcamera/file_descriptor.cpp > > @@ -8,6 +8,8 @@ > > #include <libcamera/file_descriptor.h> > > > > #include <string.h> > > +#include <sys/stat.h> > > +#include <sys/types.h> > > #include <unistd.h> > > #include <utility> > > > > @@ -221,6 +223,30 @@ FileDescriptor FileDescriptor::dup() const > > return FileDescriptor(fd()); > > } > > > > +/** > > + * \brief Retrieve the file descriptor inode > > + * > > + * \todo Should this move to the File class ? > > + * > > + * \return The file descriptor inode on success, or 0 on error > > + */ > > +ino_t FileDescriptor::inode() const > > +{ > > + if (!isValid()) > > + return 0; > > + > > + struct stat st; > > + int ret = fstat(fd_->fd(), &st); > > + if (ret < 0) { > > + ret = -errno; > > + LOG(FileDescriptor, Fatal) > > + << "Failed to fstat() fd: " << strerror(-ret); Setting errno to ret is unnecessary? if (ret < 0) { LOG(FileDescriptor, Fatal) << "Failed to fstat() fd: " << stderr(errno); return 0; } Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > + return 0; > > + } > > + > > + return st.st_ino; > > +} > > + > > FileDescriptor::Descriptor::Descriptor(int fd, bool duplicate) > > { > > if (!duplicate) { > > -- > > Regards, > > > > Laurent Pinchart > >
Hi Hiro, On Mon, Sep 06, 2021 at 08:37:26PM +0900, Hirokazu Honda wrote: > On Mon, Sep 6, 2021 at 6:04 PM <paul.elder@ideasonboard.com> wrote: > > On Mon, Sep 06, 2021 at 05:00:35AM +0300, Laurent Pinchart wrote: > > > The inode is useful to check if two file descriptors refer to the same > > > file. Add a function to retrieve it. > > > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > > Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com> > > > > Reviewed-by: Paul Elder <paul.elder@ideasonboard.com> > > > > > --- > > > Changes since v1: > > > > > > - Use isValid() instead of open-coding it > > > - Print a message on error > > > --- > > > include/libcamera/file_descriptor.h | 3 +++ > > > src/libcamera/file_descriptor.cpp | 26 ++++++++++++++++++++++++++ > > > 2 files changed, 29 insertions(+) > > > > > > diff --git a/include/libcamera/file_descriptor.h b/include/libcamera/file_descriptor.h > > > index d514aac7697b..988f9b7a3d25 100644 > > > --- a/include/libcamera/file_descriptor.h > > > +++ b/include/libcamera/file_descriptor.h > > > @@ -8,6 +8,7 @@ > > > #define __LIBCAMERA_FILE_DESCRIPTOR_H__ > > > > > > #include <memory> > > > +#include <sys/types.h> > > > > > > namespace libcamera { > > > > > > @@ -27,6 +28,8 @@ public: > > > int fd() const { return fd_ ? fd_->fd() : -1; } > > > FileDescriptor dup() const; > > > > > > + ino_t inode() const; > > > + > > > private: > > > class Descriptor > > > { > > > diff --git a/src/libcamera/file_descriptor.cpp b/src/libcamera/file_descriptor.cpp > > > index 9f9ebc81f738..0409c3e1758c 100644 > > > --- a/src/libcamera/file_descriptor.cpp > > > +++ b/src/libcamera/file_descriptor.cpp > > > @@ -8,6 +8,8 @@ > > > #include <libcamera/file_descriptor.h> > > > > > > #include <string.h> > > > +#include <sys/stat.h> > > > +#include <sys/types.h> > > > #include <unistd.h> > > > #include <utility> > > > > > > @@ -221,6 +223,30 @@ FileDescriptor FileDescriptor::dup() const > > > return FileDescriptor(fd()); > > > } > > > > > > +/** > > > + * \brief Retrieve the file descriptor inode > > > + * > > > + * \todo Should this move to the File class ? > > > + * > > > + * \return The file descriptor inode on success, or 0 on error > > > + */ > > > +ino_t FileDescriptor::inode() const > > > +{ > > > + if (!isValid()) > > > + return 0; > > > + > > > + struct stat st; > > > + int ret = fstat(fd_->fd(), &st); > > > + if (ret < 0) { > > > + ret = -errno; > > > + LOG(FileDescriptor, Fatal) > > > + << "Failed to fstat() fd: " << strerror(-ret); > > Setting errno to ret is unnecessary? > > if (ret < 0) { > LOG(FileDescriptor, Fatal) << "Failed to fstat() fd: " << stderr(errno); > return 0; > } The LOG() macros constructs a LogMessage object, and it may change errno before strerror() is called. The same is possibly true of the operator<<() calls, or other function calls in the log line. Maybe it can't happen in this specific case, but we always assign ret = -errno to be safe. > Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > > > + return 0; > > > + } > > > + > > > + return st.st_ino; > > > +} > > > + > > > FileDescriptor::Descriptor::Descriptor(int fd, bool duplicate) > > > { > > > if (!duplicate) {
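The ordering concern described above can be illustrated with a small standalone sketch (generic POSIX/C++ code, not the libcamera implementation; the inodeOf() helper name is made up for this example): the error code is copied out of errno immediately after the failing call, before any logging or stream operations get a chance to overwrite it.

#include <cerrno>
#include <cstring>
#include <iostream>
#include <sys/stat.h>
#include <sys/types.h>

/*
 * Hypothetical helper, for illustration only: capture errno right after the
 * failing fstat(), because the logging calls that follow may themselves
 * modify errno before strerror() gets to see it.
 */
static ino_t inodeOf(int fd)
{
    struct stat st;

    if (fstat(fd, &st) < 0) {
        int error = errno; /* Save first, log afterwards. */
        std::cerr << "fstat() failed: " << strerror(error) << std::endl;
        return 0;
    }

    return st.st_ino;
}

int main()
{
    std::cout << "stdin inode: " << inodeOf(0) << std::endl;
    return 0;
}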
Hi Jean-Michel, On Mon, Sep 06, 2021 at 11:36:10AM +0200, Jean-Michel Hautbois wrote: > On 06/09/2021 04:00, Laurent Pinchart wrote: > > Replace the manual implementation of frame buffer mapping with the Image > > class to improve code sharing. The ViewFinder API is updated to take an > > Image pointer in the render() function to prepare for multi-planar > > buffer support. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > --- > > src/qcam/main_window.cpp | 31 ++++++++----------------------- > > src/qcam/main_window.h | 4 ++-- > > src/qcam/meson.build | 1 + > > src/qcam/viewfinder.h | 6 +++--- > > src/qcam/viewfinder_gl.cpp | 7 ++++--- > > src/qcam/viewfinder_gl.h | 2 +- > > src/qcam/viewfinder_qt.cpp | 6 +++--- > > src/qcam/viewfinder_qt.h | 2 +- > > 8 files changed, 23 insertions(+), 36 deletions(-) > > > > diff --git a/src/qcam/main_window.cpp b/src/qcam/main_window.cpp > > index 0a00b1001570..168dd5ce30e3 100644 > > --- a/src/qcam/main_window.cpp > > +++ b/src/qcam/main_window.cpp > > @@ -7,10 +7,9 @@ > > > > #include "main_window.h" > > > > +#include <assert.h> > > #include <iomanip> > > #include <string> > > -#include <sys/mman.h> > > -#include <unistd.h> > > > > #include <QComboBox> > > #include <QCoreApplication> > > @@ -29,6 +28,7 @@ > > #include <libcamera/camera_manager.h> > > #include <libcamera/version.h> > > > > +#include "../cam/image.h" > > #include "dng_writer.h" > > #ifndef QT_NO_OPENGL > > #include "viewfinder_gl.h" > > @@ -473,15 +473,10 @@ int MainWindow::startCapture() > > > > for (const std::unique_ptr<FrameBuffer> &buffer : allocator_->buffers(stream)) { > > /* Map memory buffers and cache the mappings. */ > > - const FrameBuffer::Plane &plane = buffer->planes().front(); > > - size_t length = lseek(plane.fd.fd(), 0, SEEK_END); > > - void *memory = mmap(NULL, length, PROT_READ, MAP_SHARED, > > - plane.fd.fd(), 0); > > - > > - mappedBuffers_[buffer.get()] = { static_cast<uint8_t *>(memory), > > - plane.length }; > > - planeData_[buffer.get()] = { static_cast<uint8_t *>(memory) + plane.offset, > > - plane.length }; > > + std::unique_ptr<Image> image = > > + Image::fromFrameBuffer(buffer.get(), Image::MapMode::ReadOnly); > > + assert(image != nullptr); > > + mappedBuffers_[buffer.get()] = std::move(image); > > > > /* Store buffers on the free list. 
*/ > > freeBuffers_[stream].enqueue(buffer.get()); > > @@ -543,12 +538,7 @@ error_disconnect: > > error: > > requests_.clear(); > > > > - for (auto &iter : mappedBuffers_) { > > - const Span<uint8_t> &buffer = iter.second; > > - munmap(buffer.data(), buffer.size()); > > - } > > mappedBuffers_.clear(); > > - planeData_.clear(); > > > > freeBuffers_.clear(); > > > > @@ -580,12 +570,7 @@ void MainWindow::stopCapture() > > > > camera_->requestCompleted.disconnect(this); > > > > - for (auto &iter : mappedBuffers_) { > > - const Span<uint8_t> &buffer = iter.second; > > - munmap(buffer.data(), buffer.size()); > > - } > > mappedBuffers_.clear(); > > - planeData_.clear(); > > > > requests_.clear(); > > freeQueue_.clear(); > > @@ -682,7 +667,7 @@ void MainWindow::processRaw(FrameBuffer *buffer, > > "DNG Files (*.dng)"); > > > > if (!filename.isEmpty()) { > > - uint8_t *memory = planeData_[buffer].data(); > > + uint8_t *memory = mappedBuffers_[buffer]->data(0).data(); > > DNGWriter::write(filename.toStdString().c_str(), camera_.get(), > > rawStream_->configuration(), metadata, buffer, > > memory); > > @@ -766,7 +751,7 @@ void MainWindow::processViewfinder(FrameBuffer *buffer) > > << "fps:" << Qt::fixed << qSetRealNumberPrecision(2) << fps; > > > > /* Render the frame on the viewfinder. */ > > - viewfinder_->render(buffer, planeData_[buffer]); > > + viewfinder_->render(buffer, mappedBuffers_[buffer].get()); > > } > > > > void MainWindow::queueRequest(FrameBuffer *buffer) > > diff --git a/src/qcam/main_window.h b/src/qcam/main_window.h > > index 28244bca58b2..a16bea09eadc 100644 > > --- a/src/qcam/main_window.h > > +++ b/src/qcam/main_window.h > > @@ -34,6 +34,7 @@ using namespace libcamera; > > class QAction; > > class QComboBox; > > > > +class Image; > > class HotplugEvent; > > > > enum { > > @@ -106,8 +107,7 @@ private: > > FrameBufferAllocator *allocator_; > > > > std::unique_ptr<CameraConfiguration> config_; > > - std::map<FrameBuffer *, Span<uint8_t>> mappedBuffers_; > > - std::map<FrameBuffer *, Span<uint8_t>> planeData_; > > + std::map<FrameBuffer *, std::unique_ptr<Image>> mappedBuffers_; > > > > /* Capture state, buffers queue and statistics */ > > bool isCapturing_; > > diff --git a/src/qcam/meson.build b/src/qcam/meson.build > > index 7d3621c93d41..c46f463130cd 100644 > > --- a/src/qcam/meson.build > > +++ b/src/qcam/meson.build > > @@ -15,6 +15,7 @@ endif > > qcam_enabled = true > > > > qcam_sources = files([ > > + '../cam/image.cpp', > > '../cam/options.cpp', > > '../cam/stream_options.cpp', > > I never realized we were referencing as much "../cam" ! 
> > > 'format_converter.cpp', > > diff --git a/src/qcam/viewfinder.h b/src/qcam/viewfinder.h > > index 42d40f1f33f0..fb462835fb5f 100644 > > --- a/src/qcam/viewfinder.h > > +++ b/src/qcam/viewfinder.h > > @@ -11,11 +11,11 @@ > > #include <QList> > > #include <QSize> > > > > -#include <libcamera/base/span.h> > > - > > #include <libcamera/formats.h> > > #include <libcamera/framebuffer.h> > > > > +class Image; > > + > > class ViewFinder > > { > > public: > > @@ -24,7 +24,7 @@ public: > > virtual const QList<libcamera::PixelFormat> &nativeFormats() const = 0; > > > > virtual int setFormat(const libcamera::PixelFormat &format, const QSize &size) = 0; > > - virtual void render(libcamera::FrameBuffer *buffer, libcamera::Span<uint8_t> mem) = 0; > > + virtual void render(libcamera::FrameBuffer *buffer, Image *image) = 0; > > virtual void stop() = 0; > > > > virtual QImage getCurrentImage() = 0; > > diff --git a/src/qcam/viewfinder_gl.cpp b/src/qcam/viewfinder_gl.cpp > > index d2ef036974f4..87e4fe03cb8d 100644 > > --- a/src/qcam/viewfinder_gl.cpp > > +++ b/src/qcam/viewfinder_gl.cpp > > @@ -13,6 +13,8 @@ > > > > #include <libcamera/formats.h> > > > > +#include "../cam/image.h" > > Where should Image class be in the end, once stable enough ? I expect it to move to include/libcamera/ then. > Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > > > + > > static const QList<libcamera::PixelFormat> supportedFormats{ > > /* YUV - packed (single plane) */ > > libcamera::formats::UYVY, > > @@ -110,8 +112,7 @@ QImage ViewFinderGL::getCurrentImage() > > return grabFramebuffer(); > > } > > > > -void ViewFinderGL::render(libcamera::FrameBuffer *buffer, > > - libcamera::Span<uint8_t> mem) > > +void ViewFinderGL::render(libcamera::FrameBuffer *buffer, Image *image) > > { > > if (buffer->planes().size() != 1) { > > qWarning() << "Multi-planar buffers are not supported"; > > @@ -121,7 +122,7 @@ void ViewFinderGL::render(libcamera::FrameBuffer *buffer, > > if (buffer_) > > renderComplete(buffer_); > > > > - data_ = mem.data(); > > + data_ = image->data(0).data(); > > /* > > * \todo Get the stride from the buffer instead of computing it naively > > */ > > diff --git a/src/qcam/viewfinder_gl.h b/src/qcam/viewfinder_gl.h > > index 3334549e0be4..7cd8ef3316b9 100644 > > --- a/src/qcam/viewfinder_gl.h > > +++ b/src/qcam/viewfinder_gl.h > > @@ -39,7 +39,7 @@ public: > > const QList<libcamera::PixelFormat> &nativeFormats() const override; > > > > int setFormat(const libcamera::PixelFormat &format, const QSize &size) override; > > - void render(libcamera::FrameBuffer *buffer, libcamera::Span<uint8_t> mem) override; > > + void render(libcamera::FrameBuffer *buffer, Image *image) override; > > void stop() override; > > > > QImage getCurrentImage() override; > > diff --git a/src/qcam/viewfinder_qt.cpp b/src/qcam/viewfinder_qt.cpp > > index a0bf99b0b522..fef6d53eef5e 100644 > > --- a/src/qcam/viewfinder_qt.cpp > > +++ b/src/qcam/viewfinder_qt.cpp > > @@ -19,6 +19,7 @@ > > > > #include <libcamera/formats.h> > > > > +#include "../cam/image.h" > > #include "format_converter.h" > > > > static const QMap<libcamera::PixelFormat, QImage::Format> nativeFormats > > @@ -78,15 +79,14 @@ int ViewFinderQt::setFormat(const libcamera::PixelFormat &format, > > return 0; > > } > > > > -void ViewFinderQt::render(libcamera::FrameBuffer *buffer, > > - libcamera::Span<uint8_t> mem) > > +void ViewFinderQt::render(libcamera::FrameBuffer *buffer, Image *image) > > { > > if (buffer->planes().size() != 1) { > > qWarning() << 
"Multi-planar buffers are not supported"; > > return; > > } > > > > - unsigned char *memory = mem.data(); > > + unsigned char *memory = image->data(0).data(); > > size_t size = buffer->metadata().planes()[0].bytesused; > > > > { > > diff --git a/src/qcam/viewfinder_qt.h b/src/qcam/viewfinder_qt.h > > index 1a569b9cee6e..6b48ef48a7d1 100644 > > --- a/src/qcam/viewfinder_qt.h > > +++ b/src/qcam/viewfinder_qt.h > > @@ -32,7 +32,7 @@ public: > > const QList<libcamera::PixelFormat> &nativeFormats() const override; > > > > int setFormat(const libcamera::PixelFormat &format, const QSize &size) override; > > - void render(libcamera::FrameBuffer *buffer, libcamera::Span<uint8_t> mem) override; > > + void render(libcamera::FrameBuffer *buffer, Image *image) override; > > void stop() override; > > > > QImage getCurrentImage() override; > >
Hi Jean-Michel, On Mon, Sep 06, 2021 at 10:35:50AM +0200, Jean-Michel Hautbois wrote: > On 06/09/2021 04:00, Laurent Pinchart wrote: > > The new Image class represents a multi-planar image with direct access > > to pixel data. It currently duplicates the function of the > > MappedFrameBuffer class which is internal to libcamera, and will serve > > as a design playground to improve the API until it is considered ready > > to be made part of the libcamera public API. > > I like the idea, maybe add some documentation already in the class ? That's a good idea, but in the specific case I'd like to get this series merged ASAP to fix the breakage in the master branch, so I'd prefer adding the documentation on top. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > --- > > src/cam/image.cpp | 107 ++++++++++++++++++++++++++++++++++++++++++++ > > src/cam/image.h | 52 +++++++++++++++++++++ > > src/cam/meson.build | 1 + > > 3 files changed, 160 insertions(+) > > create mode 100644 src/cam/image.cpp > > create mode 100644 src/cam/image.h > > > > diff --git a/src/cam/image.cpp b/src/cam/image.cpp > > new file mode 100644 > > index 000000000000..7ae5f52dccb4 > > --- /dev/null > > +++ b/src/cam/image.cpp > > @@ -0,0 +1,107 @@ > > +/* SPDX-License-Identifier: LGPL-2.1-or-later */ > > +/* > > + * Copyright (C) 2021, Google Inc. > > + * > > + * image.cpp - Multi-planar image with access to pixel data > > + */ > > + > > +#include "image.h" > > + > > +#include <assert.h> > > +#include <errno.h> > > +#include <iostream> > > +#include <map> > > +#include <string.h> > > +#include <sys/mman.h> > > +#include <unistd.h> > > + > > +using namespace libcamera; > > + > > +std::unique_ptr<Image> Image::fromFrameBuffer(const FrameBuffer *buffer, MapMode mode) > Can you see a use case for Image::toFrameBuffer not implemented yet ? What would be in this class apart from a conversion from a FrameBuffer > to an Image ? I don't think so. The Image class is an interface to provide access to pixel data. In its current form it's constructed from a FrameBuffer, but I'd like the ability to construct it from a byte array as well. This is related to the MappedVectorBuffer class that Hiro has proposed, it would allow the JPEG compression in the Android HAL to use an Image as the source, regardless of whether it compresses the still capture (coming from libcamera in a FrameBuffer) or the thumbnail (downscaled in software and stored in a std::vector<uint8_t>). What I still haven't determined is whether the Image class should be an interface with pure virtual functions only, implemented by subclasses such as FrameBufferImage or MemoryImage, or if it should contain the data as well, populated by the different constructors. I've also started to think about how to perform the mapping. For FrameBuffer objects constructed from Android buffers, the mapping should be delegated to gralloc on Android and to the CameraBufferManager on Chrome OS. For FrameBuffer objects constructed internally by the V4L2VideoDevice (and in particular the ones exposed to applications with FrameBufferAllocator), the code below should be correct. For other types of FrameBuffer objects supplied by applications, another method of mapping may be needed. I'm not sure yet how to best handle that, and if we'll need a FrameBufferMapper object that FrameBuffer instances will reference. 
> > +{ > > + std::unique_ptr<Image> image{ new Image() }; > > + > > + assert(!buffer->planes().empty()); > > + > > + int mmapFlags = 0; > > + > > + if (mode & MapMode::ReadOnly) > > + mmapFlags |= PROT_READ; > > + > > + if (mode & MapMode::WriteOnly) > > + mmapFlags |= PROT_WRITE; > > + > > + struct MappedBufferInfo { > > + uint8_t *address = nullptr; > > + size_t mapLength = 0; > > + size_t dmabufLength = 0; > > + }; > > + std::map<int, MappedBufferInfo> mappedBuffers; > > + > > + for (const FrameBuffer::Plane &plane : buffer->planes()) { > > + const int fd = plane.fd.fd(); > > + if (mappedBuffers.find(fd) == mappedBuffers.end()) { > > + const size_t length = lseek(fd, 0, SEEK_END); > > + mappedBuffers[fd] = MappedBufferInfo{ nullptr, 0, length }; > > + } > > + > > + const size_t length = mappedBuffers[fd].dmabufLength; > > + > > + if (plane.offset > length || > > + plane.offset + plane.length > length) { > > + std::cerr << "plane is out of buffer: buffer length=" > > + << length << ", plane offset=" << plane.offset > > + << ", plane length=" << plane.length > > + << std::endl; > > + return nullptr; > > + } > > + size_t &mapLength = mappedBuffers[fd].mapLength; > > + mapLength = std::max(mapLength, > > + static_cast<size_t>(plane.offset + plane.length)); > > + } > > + > > + for (const FrameBuffer::Plane &plane : buffer->planes()) { > > + const int fd = plane.fd.fd(); > > + auto &info = mappedBuffers[fd]; > > + if (!info.address) { > > + void *address = mmap(nullptr, info.mapLength, mmapFlags, > > + MAP_SHARED, fd, 0); > > + if (address == MAP_FAILED) { > > + int error = -errno; > > + std::cerr << "Failed to mmap plane: " > > + << strerror(-error) << std::endl; > > + return nullptr; > > + } > > + > > + info.address = static_cast<uint8_t *>(address); > > + image->maps_.emplace_back(info.address, info.mapLength); > > + } > > + > > + image->planes_.emplace_back(info.address + plane.offset, plane.length); > > + } > > + > > Why are you using two loops on buffer->planes() ? Is it for code clarity > or something I did not get ? Because we may have multiple planes using the same dmabuf fd. The first look gathers the dmabuf fds along with their length, the second loop then maps them. We need to compute the length to be mapped by looking at all planes first, before doing any mapping. > > + return image; > > +} > > + > > +Image::Image() = default; > > + > > +Image::~Image() > > +{ > > + for (Span<uint8_t> &map : maps_) > > + munmap(map.data(), map.size()); > > +} > > + > > +unsigned int Image::numPlanes() const > > +{ > > + return planes_.size(); > > +} > > + > > +Span<uint8_t> Image::data(unsigned int plane) > > +{ > > + return planes_[plane]; > > +} > > + > > +Span<const uint8_t> Image::data(unsigned int plane) const > > +{ > > + return planes_[plane]; > > +} > > diff --git a/src/cam/image.h b/src/cam/image.h > > new file mode 100644 > > index 000000000000..1ce5f84e5f9e > > --- /dev/null > > +++ b/src/cam/image.h > > @@ -0,0 +1,52 @@ > > +/* SPDX-License-Identifier: LGPL-2.1-or-later */ > > +/* > > + * Copyright (C) 2021, Google Inc. 
> > + * > > + * image.h - Multi-planar image with access to pixel data > > + */ > > +#ifndef __CAM_IMAGE_H__ > > +#define __CAM_IMAGE_H__ > > + > > +#include <memory> > > +#include <stdint.h> > > +#include <vector> > > + > > +#include <libcamera/base/class.h> > > +#include <libcamera/base/flags.h> > > +#include <libcamera/base/span.h> > > + > > +#include <libcamera/framebuffer.h> > > + > > +class Image > > +{ > > +public: > > + enum class MapMode { > > + ReadOnly = 1 << 0, > > + WriteOnly = 1 << 1, > > + ReadWrite = ReadOnly | WriteOnly, > > + }; > > + > > + static std::unique_ptr<Image> fromFrameBuffer(const libcamera::FrameBuffer *buffer, > > + MapMode mode); > > + > > + ~Image(); > > + > > + unsigned int numPlanes() const; > > + > > + libcamera::Span<uint8_t> data(unsigned int plane); > > + libcamera::Span<const uint8_t> data(unsigned int plane) const; > > + > > +private: > > + LIBCAMERA_DISABLE_COPY(Image) > > + > > + Image(); > > + > > + std::vector<libcamera::Span<uint8_t>> maps_; > > + std::vector<libcamera::Span<uint8_t>> planes_; > > +}; > > + > > +namespace libcamera { > > +LIBCAMERA_FLAGS_ENABLE_OPERATORS(Image::MapMode) > > +} > > + > > +#endif /* __CAM_IMAGE_H__ */ > > diff --git a/src/cam/meson.build b/src/cam/meson.build > > index ea36aaa5c514..e8e2ae57d3f4 100644 > > --- a/src/cam/meson.build > > +++ b/src/cam/meson.build > > @@ -14,6 +14,7 @@ cam_sources = files([ > > 'event_loop.cpp', > > 'file_sink.cpp', > > 'frame_sink.cpp', > > + 'image.cpp', > > 'main.cpp', > > 'options.cpp', > > 'stream_options.cpp', > >
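To make the two design directions discussed in the message above a little more concrete, here is a rough, hypothetical sketch of the "pure interface" variant, with plane access exposed through an abstract base class and a memory-backed implementation. The class and member names (AbstractImage, MemoryImage, PlaneView) are invented for this sketch and are not part of libcamera or of the proposed patch; a FrameBuffer-backed subclass would additionally own the mmap()ed ranges and unmap them in its destructor, roughly as fromFrameBuffer() does in the quoted code.

#include <cstddef>
#include <cstdint>
#include <utility>
#include <vector>

/* A non-owning view of one plane's pixel data. */
struct PlaneView {
    uint8_t *data;
    std::size_t size;
};

/* Hypothetical pure interface: only pixel access, no mapping policy. */
class AbstractImage
{
public:
    virtual ~AbstractImage() = default;

    virtual unsigned int numPlanes() const = 0;
    virtual PlaneView plane(unsigned int index) = 0;
};

/*
 * Memory-backed implementation, e.g. for a software-downscaled thumbnail
 * stored in a std::vector<uint8_t>. Planes are laid out back to back.
 */
class MemoryImage : public AbstractImage
{
public:
    MemoryImage(std::vector<uint8_t> data, std::vector<std::size_t> planeSizes)
        : data_(std::move(data)), planeSizes_(std::move(planeSizes))
    {
    }

    unsigned int numPlanes() const override
    {
        return static_cast<unsigned int>(planeSizes_.size());
    }

    PlaneView plane(unsigned int index) override
    {
        std::size_t offset = 0;
        for (unsigned int i = 0; i < index; ++i)
            offset += planeSizes_[i];
        return { data_.data() + offset, planeSizes_[index] };
    }

private:
    std::vector<uint8_t> data_;
    std::vector<std::size_t> planeSizes_;
};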
Hi Jean-Michel, On Mon, Sep 06, 2021 at 07:50:58AM +0200, Jean-Michel Hautbois wrote: > On 06/09/2021 04:00, Laurent Pinchart wrote: > > Multi-planar frame buffers can store their planes contiguously in > > memory, or split them in discontiguous memory areas. Add a private > > function to check in which of these two categories the frame buffer > > belongs. This will be used to correctly handle the differences between > > the V4L2 single and multi planar APIs. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com> > > Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > --- > > Changes v1: > > > > - Merge both loops in FrameBuffer::FrameBuffer() > > --- > > include/libcamera/internal/framebuffer.h | 2 ++ > > src/libcamera/framebuffer.cpp | 45 ++++++++++++++++++++++-- > > 2 files changed, 45 insertions(+), 2 deletions(-) > > > > diff --git a/include/libcamera/internal/framebuffer.h b/include/libcamera/internal/framebuffer.h > > index 606aed2b4782..cd33c295466e 100644 > > --- a/include/libcamera/internal/framebuffer.h > > +++ b/include/libcamera/internal/framebuffer.h > > @@ -21,9 +21,11 @@ public: > > Private(); > > > > void setRequest(Request *request) { request_ = request; } > > + bool isContiguous() const { return isContiguous_; } > > > > private: > > Request *request_; > > + bool isContiguous_; > > }; > > > > } /* namespace libcamera */ > > diff --git a/src/libcamera/framebuffer.cpp b/src/libcamera/framebuffer.cpp > > index ad63a34a83bf..e71c2ffae034 100644 > > --- a/src/libcamera/framebuffer.cpp > > +++ b/src/libcamera/framebuffer.cpp > > @@ -106,7 +106,7 @@ LOG_DEFINE_CATEGORY(Buffer) > > */ > > > > FrameBuffer::Private::Private() > > - : request_(nullptr) > > + : request_(nullptr), isContiguous_(true) > > { > > } > > > > @@ -120,6 +120,17 @@ FrameBuffer::Private::Private() > > * handlers, it is called by the pipeline handlers themselves. > > */ > > > > +/** > > + * \fn FrameBuffer::Private::isContiguous() > > + * \brief Check if the frame buffer stores planes contiguously in memory > > + * > > + * Multi-planar frame buffers can store their planes contiguously in memory, or > > + * split them into discontiguous memory areas. This function checks in which of > > + * these two categories the frame buffer belongs. > > + * > > + * \return True if the planes are stored contiguously in memory, false otherwise > > + */ > > + > > /** > > * \class FrameBuffer > > * \brief Frame buffer data and its associated dynamic metadata > > @@ -199,8 +210,38 @@ FrameBuffer::FrameBuffer(const std::vector<Plane> &planes, unsigned int cookie) > > : Extensible(std::make_unique<Private>()), planes_(planes), > > cookie_(cookie) > > { > > - for (const auto &plane : planes_) > > + unsigned int offset = 0; > > + bool isContiguous = true; > > + ino_t inode = 0; > > + > > + for (const auto &plane : planes_) { > > ASSERT(plane.offset != Plane::kInvalidOffset); > > + > > + if (plane.offset != offset) { > > + isContiguous = false; > > + break; > > + } > > + > > + /* > > + * Two different dmabuf file descriptors may still refer to the > > + * same dmabuf instance. Check this using inodes. > > + */ > > How is that possible ? dup(), or IPC. Depending on the IPC mechanism, if you send the same fd twice (once for each plane in an NV12 buffer for instance), you may get two different fds on the receiving side. 
> > + if (plane.fd.fd() != planes_[0].fd.fd()) { > > + if (!inode) > > + inode = planes_[0].fd.inode(); > > + if (plane.fd.inode() != inode) { > > + isContiguous = false; > > + break; > > + } > > + } > > + > > + offset += plane.length; > > + } > > + > > + LOG(Buffer, Debug) > > + << "Buffer is " << (isContiguous ? "not " : "") << "contiguous"; > > + > > + _d()->isContiguous_ = isContiguous; > > } > > > > /**
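[Editor's note] To make the dup() case discussed above concrete, here is a small standalone illustration (not libcamera code) of two descriptor numbers resolving to the same underlying file through fstat(). The patch applies the same idea via FileDescriptor::inode(), comparing st_ino only; a fully general check would also compare st_dev.

```cpp
#include <cassert>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>

static ino_t inodeOf(int fd)
{
	struct stat st;
	return fstat(fd, &st) == 0 ? st.st_ino : 0;
}

int main()
{
	int fd1 = STDIN_FILENO;
	int fd2 = dup(fd1);

	assert(fd1 != fd2);                   /* Different descriptor numbers... */
	assert(inodeOf(fd1) == inodeOf(fd2)); /* ...same open file underneath. */

	close(fd2);
	return 0;
}
```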
Hi David, On Mon, Sep 06, 2021 at 12:22:25PM +0100, David Plowman wrote: > Hi Laurent, everyone > > Thanks for all the efforts to get this working! I had just a little > question or two... > > 1. Is it easy to tell if a FrameBuffer is actually single plane or > multi plane? If not, could we add a public API function that would > tell us? You can use FrameBuffer::planes().size() to get the number of planes. Or did you mean checking if the different planes in a multi-planar frame buffer are stored contiguously in the same dmabuf ? There's a private helper for that (FrameBuffer::Private::isContiguous()), I haven't made it public yet as I wanted to evaluate the use cases first. > 2. Is it easy to get the full size of the buffer for the single plane > case (rather than having to add all the bits up)? And again, if the > answer is no, could we add such a thing? > > I'm thinking of trying to make life easy for applications that might > want to pass these buffers to codecs where the driver might only > support single planes. Not thinking of any platform in particular... > :) It again depends what you mean :-) If the FrameBuffer has a single plane, FrameBuffer::planes()[0].length (and FrameMetadata::planes()[0].bytesused) will give you what you need. I suspect, however, that you're considering the case of a multi-planar FrameBuffer with planes stored contiguously in memory, using the single-planar V4L2 formats (e.g. V4L2_PIX_FMT_NV12, as opposed to V4L2_PIX_FMT_NV12M). I'm a bit worried that a helper function in that case would be used by applications to ignore that the buffer can be truly multi-planar. > On Mon, 6 Sept 2021 at 03:01, Laurent Pinchart wrote: > > > > Hello everybody, > > > > This patch series started as an investigation of a qcam failure with > > IPU3 after the merge of the FrameBuffer offset support. While a hack in > > qcam would be possible, I decided to instead address the core issue and > > fix it in V4L2VideoDevice. > > > > Compared to v1, the series now includes fixes for cam and qcam in > > addition to the changes needed in the libcamera core. They have been > > tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > > > > The GStreamer element seems to work fine without any change required. > > The V4L2 compatibility layer is still broken, and I haven't tested the > > Android HAL yet (any volunteer ?). > > > > The most important change is in patches 13/27 and 14/27, which translate > > between V4L2 buffers and libcamera FrameBuffer to handle the case where > > a multi-planar frame buffer is used with the V4L2 single-planar API. > > It's working more or less by chance at the moment (except in qcam where > > it's broken, and possibly in other places I haven't tested). Patches > > 01/27 to 12/27 are cleanups and additions to prepare for the work in > > V4L2VideoDevice, and patch 15/27 is a small cleanup on top. Patches > > 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. > > > > Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an > > issue in the Android HAL. Worth being noted is patch 19/27 that > > introduces an Image class shared by cam and qcam. The class duplicates > > the MappedFrameBuffer implementation private to libcamera. I've tried to > > rework MappedFrameBuffer into something I would be happy to see in the > > public API, but failed to do so in a reasonable amount of time, and I > > didn't want to delay this important regression fix. > > > > This series doesn't break any unit test, as vimc doesn't support NV12. 
> > Addition of NV12 support to the vimc kernel driver would be very nice, > > in order to test multi-planar support in our unit tests. Volunteers are > > welcome ;-) > > > > Laurent Pinchart (27): > > libcamera: base: utils: Use size_t for index in utils::enumerate() > > libcamera: file_descriptor: Add a function to retrieve the inode > > libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() > > libcamera: Use V4L2PixelFormat::fromPixelFormat() > > libcamera: formats: Move plane info structure to PixelFormatInfo > > libcamera: formats: Add planeSize() helpers to PixelFormatInfo > > libcamera: formats: Support V4L2 non-contiguous formats > > libcamera: framebuffer: Move planes check to constructor > > libcamera: framebuffer: Add a function to check if planes are > > contiguous > > libcamera: v4l2_videodevice: Cache PixelFormatInfo > > libcamera: v4l2_videodevice: Document plane handling in createBuffer() > > libcamera: v4l2_videodevice: Take stride into account to compute > > offsets > > libcamera: v4l2_videodevice: Coalesce planes when queuing buffer > > libcamera: v4l2_videodevice: Split planes when dequeuing buffer > > libcamera: v4l2_videodevice: Use utils::enumerate() > > libcamera: framebuffer: Allocate metadata planes at construction time > > libcamera: framebuffer: Prevent modifying the number of metadata > > planes > > android: camera_device: Don't assume all planes use the same fd > > cam: Add Image class > > cam: file_sink: Use Image class to access pixel data > > cam: drm: Support per-plane stride values > > cam: drm: Set per-plane offsets when creating DRM frame buffer > > cam: drm: Avoid importing the same dmabuf multiple times > > qcam: Print bytesused for all planes > > qcam: Use Image class to access pixel data > > qcam: viewfinder_gl: Support multi-planar buffers > > qcam: viewfinder_qt: Support multi-planar buffers > > > > include/libcamera/base/utils.h | 4 +- > > include/libcamera/file_descriptor.h | 3 + > > include/libcamera/framebuffer.h | 19 +- > > include/libcamera/internal/formats.h | 22 +- > > include/libcamera/internal/framebuffer.h | 2 + > > include/libcamera/internal/v4l2_pixelformat.h | 2 +- > > include/libcamera/internal/v4l2_videodevice.h | 3 +- > > src/android/camera_device.cpp | 25 +- > > src/android/mm/generic_camera_buffer.cpp | 11 +- > > src/android/yuv/post_processor_yuv.cpp | 10 +- > > src/cam/camera_session.cpp | 4 +- > > src/cam/drm.cpp | 38 +- > > src/cam/drm.h | 7 +- > > src/cam/file_sink.cpp | 44 +-- > > src/cam/file_sink.h | 6 +- > > src/cam/image.cpp | 107 +++++ > > src/cam/image.h | 52 +++ > > src/cam/kms_sink.cpp | 28 +- > > src/cam/meson.build | 1 + > > src/libcamera/file_descriptor.cpp | 26 ++ > > src/libcamera/formats.cpp | 373 ++++++++++++++---- > > src/libcamera/framebuffer.cpp | 57 ++- > > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- > > src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- > > .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- > > src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- > > src/libcamera/pipeline/simple/converter.cpp | 8 +- > > src/libcamera/pipeline/simple/simple.cpp | 4 +- > > src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- > > src/libcamera/pipeline/vimc/vimc.cpp | 8 +- > > src/libcamera/v4l2_pixelformat.cpp | 11 +- > > src/libcamera/v4l2_videodevice.cpp | 196 ++++++--- > > src/qcam/format_converter.cpp | 18 +- > > src/qcam/format_converter.h | 9 +- > > src/qcam/main_window.cpp | 38 +- > > src/qcam/main_window.h | 4 +- > > src/qcam/meson.build | 1 + > > src/qcam/viewfinder.h | 6 +- > > src/qcam/viewfinder_gl.cpp 
| 45 +-- > > src/qcam/viewfinder_gl.h | 4 +- > > src/qcam/viewfinder_qt.cpp | 20 +- > > src/qcam/viewfinder_qt.h | 2 +- > > src/v4l2/v4l2_camera_proxy.cpp | 11 +- > > test/libtest/buffer_source.cpp | 3 +- > > test/utils.cpp | 10 +- > > 45 files changed, 911 insertions(+), 357 deletions(-) > > create mode 100644 src/cam/image.cpp > > create mode 100644 src/cam/image.h
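[Editor's note] On the buffer-size question answered earlier in this message, a minimal sketch of the "add all the bits up" approach. Hedged: this only makes sense once the planes are known to live contiguously in a single dmabuf (e.g. V4L2_PIX_FMT_NV12 as opposed to V4L2_PIX_FMT_NV12M), and the helper name is made up for illustration.

```cpp
#include <cstddef>
#include <numeric>
#include <vector>

#include <libcamera/framebuffer.h>

static size_t bufferSize(const libcamera::FrameBuffer &buffer)
{
	const std::vector<libcamera::FrameBuffer::Plane> &planes = buffer.planes();

	/* Sum the length of every plane to obtain the total buffer size. */
	return std::accumulate(planes.begin(), planes.end(), size_t{ 0 },
			       [](size_t sum, const libcamera::FrameBuffer::Plane &plane) {
				       return sum + plane.length;
			       });
}
```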
Hi Kieran, On Mon, Sep 06, 2021 at 11:43:15AM +0100, Kieran Bingham wrote: > On 06/09/2021 11:08, Umang Jain wrote: > > On 9/6/21 7:30 AM, Laurent Pinchart wrote: > >> Hello everybody, > >> > >> This patch series started as an investigation of a qcam failure with > >> IPU3 after the merge of the FrameBuffer offset support. While a hack in > >> qcam would be possible, I decided to instead address the core issue and > >> fix it in V4L2VideoDevice. > >> > >> Compared to v1, the series now includes fixes for cam and qcam in > >> addition to the changes needed in the libcamera core. They have been > >> tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > >> > >> The GStreamer element seems to work fine without any change required. > >> The V4L2 compatibility layer is still broken, and I haven't tested the > >> Android HAL yet (any volunteer ?). > > > > I am applied this series on top of master and tested on nautilus. > > Streaming seems fine to me, although it was appearing more choppy than > > usual. > > > > Now that I see, entire nautilus seems laggy to respond in general. One > > "Chrome" process hogging 50% CPU, seems like a transient issue. > > > > So, streaming seems fine on multiple runs, with multiple resolutions > > selections (from the camera-app menu), however, requesting MJPEG > > (shutter-click) results in a segfault. > > > > The stack trace is below: > > > > [0:24:11.415660787] [10635] DEBUG HAL camera_device.cpp:941 > > '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750235824 with 2 streams > > [0:24:11.415858538] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is > > not contiguous > > [0:24:11.415905500] [10635] DEBUG HAL camera_device.cpp:980 > > '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] > > (direct) > > [0:24:11.415943620] [10635] DEBUG HAL camera_device.cpp:966 > > '\_SB_.PCI0.I2C2.CAM0': 1 - (4160x3104)[0x00000021] -> (4160x3104)[NV12] > > (mapped) > > [0:24:11.416461639] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video0[37:cap]: Queueing buffer 3 > > [0:24:11.428903141] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video0[37:cap]: Dequeuing buffer 2 > > [0:24:11.429095316] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature > > estimated: 6644 > > [0:24:11.429563781] [10629] DEBUG DelayedControls > > delayed_controls.cpp:237 frame 193 started > > [0:24:11.429627478] [10629] DEBUG DelayedControls > > delayed_controls.cpp:272 Setting Exposure to 269 at index 193 > > [0:24:11.429683048] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video6[25:cap]: Queueing buffer 1 > > [0:24:11.436615191] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video5[27:out]: Queueing buffer 2 > > [0:24:11.436718629] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video8[28:cap]: Queueing buffer 2 > > [0:24:11.436799420] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video4[24:out]: Queueing buffer 2 > > [0:24:11.457388821] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video4[24:out]: Dequeuing buffer 1 > > [0:24:11.457948159] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video6[25:cap]: Dequeuing buffer 0 > > [0:24:11.458257692] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video5[27:out]: Dequeuing buffer 2 > > [0:24:11.458525315] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video8[28:cap]: Dequeuing buffer 1 > > [0:24:11.458968848] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > > [0:24:11.459005634] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > > 
[0:24:11.459082715] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for > > red: 1.16864 and for blue: 1.14863 > > [0:24:11.459325998] [10629] DEBUG DelayedControls > > delayed_controls.cpp:179 Queuing Exposure to 269 at index 194 > > [0:24:11.459562485] [10629] DEBUG DelayedControls > > delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 194 > > [0:24:11.459815347] [10629] DEBUG Request request.cpp:291 > > Request(129:C:0/1:140736750229136) > > [0:24:11.460390126] [10629] DEBUG HAL camera_device.cpp:1117 > > '\_SB_.PCI0.I2C2.CAM0': Request 140736750229136 completed with 1 streams > > [0:24:11.461855854] [10635] DEBUG Request request.cpp:94 Created request > > - cookie: 140736750230064 > > [0:24:11.461939858] [10635] DEBUG HAL camera_device.cpp:941 > > '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750230064 with 1 streams > > [0:24:11.462057435] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is > > not contiguous > > [0:24:11.462098906] [10635] DEBUG HAL camera_device.cpp:980 > > '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] > > (direct) > > [0:24:11.463164722] [10629] DEBUG DelayedControls > > delayed_controls.cpp:237 frame 194 started > > [0:24:11.463730946] [10629] DEBUG DelayedControls > > delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 193 > > [0:24:11.464033686] [10629] DEBUG DelayedControls > > delayed_controls.cpp:272 Setting Exposure to 269 at index 194 > > [0:24:11.464329869] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video0[37:cap]: Queueing buffer 0 > > [0:24:11.495739092] [10629] DEBUG DelayedControls > > delayed_controls.cpp:237 frame 195 started > > [0:24:11.496489311] [10629] DEBUG DelayedControls > > delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 194 > > [0:24:11.496737385] [10629] DEBUG DelayedControls > > delayed_controls.cpp:285 Queue is empty, auto queue no-op. 
> > [0:24:11.497044311] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video0[37:cap]: Dequeuing buffer 3 > > [0:24:11.497335155] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature > > estimated: 6658 > > [0:24:11.497528845] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video6[25:cap]: Queueing buffer 2 > > [0:24:11.503589322] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video5[27:out]: Queueing buffer 3 > > [0:24:11.504027344] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video8[28:cap]: Queueing buffer 3 > > [0:24:11.504287330] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video4[24:out]: Queueing buffer 3 > > [0:24:11.504712501] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video4[24:out]: Dequeuing buffer 2 > > [0:24:11.505005096] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video6[25:cap]: Dequeuing buffer 1 > > [0:24:11.505260331] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video5[27:out]: Dequeuing buffer 3 > > [0:24:11.505506837] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video8[28:cap]: Dequeuing buffer 2 > > [0:24:11.505940926] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > > [0:24:11.505976974] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > > [0:24:11.506057427] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for > > red: 1.16797 and for blue: 1.14978 > > [0:24:11.506297848] [10629] DEBUG DelayedControls > > delayed_controls.cpp:179 Queuing Exposure to 269 at index 196 > > [0:24:11.506547021] [10629] DEBUG DelayedControls > > delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 196 > > [0:24:11.506796456] [10629] DEBUG Request request.cpp:291 > > Request(130:C:0/1:140736750225984) > > [0:24:11.507374522] [10629] DEBUG HAL camera_device.cpp:1117 > > '\_SB_.PCI0.I2C2.CAM0': Request 140736750225984 completed with 1 streams > > [0:24:11.509426987] [10635] DEBUG Request request.cpp:94 Created request > > - cookie: 140736750227664 > > [0:24:11.509507039] [10635] DEBUG HAL camera_device.cpp:941 > > '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750227664 with 1 streams > > [0:24:11.509630706] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is > > not contiguous > > [0:24:11.509675200] [10635] DEBUG HAL camera_device.cpp:980 > > '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] > > (direct) > > [0:24:11.511309605] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 > > /dev/video0[37:cap]: Queueing buffer 1 > > [0:24:11.528984601] [10629] DEBUG DelayedControls > > delayed_controls.cpp:237 frame 196 started > > [0:24:11.529446048] [10629] DEBUG DelayedControls > > delayed_controls.cpp:272 Setting Exposure to 269 at index 196 > > [0:24:11.548431632] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video4[24:out]: Dequeuing buffer 3 > > [0:24:11.548993830] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video6[25:cap]: Dequeuing buffer 2 > > [0:24:11.549320802] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 > > /dev/video8[28:cap]: Dequeuing buffer 3 > > [0:24:11.549807736] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > > [0:24:11.549846777] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > > [0:24:11.549927350] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for > > red: 1.16841 and for blue: 1.14912 > > [0:24:11.550170924] [10629] DEBUG DelayedControls > > delayed_controls.cpp:179 Queuing Exposure to 269 at index 197 > > [0:24:11.550405629] [10629] DEBUG DelayedControls > > delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 197 > > [0:24:11.550646460] 
[10629] DEBUG Request request.cpp:291 > > Request(131:C:0/1:140736750235824) > > [0:24:11.551245104] [10629] DEBUG HAL camera_device.cpp:1117 > > '\_SB_.PCI0.I2C2.CAM0': Request 140736750235824 completed with 2 streams > > [0:24:11.562307680] [10629] DEBUG EXIF exif.cpp:522 Created EXIF > > instance (536 bytes) > > [0:24:11.634800113] [10629] DEBUG JPEG encoder_libjpeg.cpp:220 JPEG > > Encode Starting:4160x3104 > > > > Thread 6 "CameraModuleThr" received signal SIGSEGV, Segmentation fault. > > [Switching to Thread 0x7fffe7fff640 (LWP 10629)] > > 0x00007ffff597ead5 in > > EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, > > 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so > > (gdb) bt > > #0Â 0x00007ffff597ead5 in > > EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, > > 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so > > #1Â 0x00007ffff597eda0 in > > EncoderLibJpeg::encode(libcamera::Span<unsigned char const, > > 18446744073709551615ul>, libcamera::Span<unsigned char, > > 18446744073709551615ul>, libcamera::Span<unsigned char const, > > 18446744073709551615ul>, unsigned int) () from > > /usr/lib64/camera_hal/libcamera-hal.so > > #2Â 0x00007ffff597ec53 in EncoderLibJpeg::encode(libcamera::FrameBuffer > > const&, libcamera::Span<unsigned char, 18446744073709551615ul>, > > 18446744073709551615ul is -1, so we've certainly missed > capturing/preventing an error code from getting assigned to the span at > some point, where it then got stored as an unsigned long. This it the template argument, and -1 indicates a dynamic extent, as opposed to Span<> instances that have a static extent. > Does this happen repeatably? or only on some specific event?
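[Editor's note] A compile-time check of what that large number in the mangled name is, as a sketch assuming libcamera's Span mirrors std::span and exposes libcamera::dynamic_extent in <libcamera/base/span.h>:

```cpp
#include <cstddef>
#include <cstdint>
#include <limits>
#include <type_traits>

#include <libcamera/base/span.h>

/* 18446744073709551615 is size_t(-1) on 64-bit, the "dynamic extent" sentinel. */
static_assert(libcamera::dynamic_extent ==
	      std::numeric_limits<std::size_t>::max(),
	      "dynamic_extent is size_t(-1)");

/*
 * Span<const uint8_t> is shorthand for Span<const uint8_t, dynamic_extent>;
 * the extent in the demangled symbol is part of the type, not a runtime size
 * that overflowed.
 */
static_assert(std::is_same_v<libcamera::Span<const uint8_t>,
			     libcamera::Span<const uint8_t, libcamera::dynamic_extent>>,
	      "the default extent is dynamic");
```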
Hi Umang, On Mon, Sep 06, 2021 at 03:38:04PM +0530, Umang Jain wrote: > On 9/6/21 7:30 AM, Laurent Pinchart wrote: > > Hello everybody, > > > > This patch series started as an investigation of a qcam failure with > > IPU3 after the merge of the FrameBuffer offset support. While a hack in > > qcam would be possible, I decided to instead address the core issue and > > fix it in V4L2VideoDevice. > > > > Compared to v1, the series now includes fixes for cam and qcam in > > addition to the changes needed in the libcamera core. They have been > > tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > > > > The GStreamer element seems to work fine without any change required. > > The V4L2 compatibility layer is still broken, and I haven't tested the > > Android HAL yet (any volunteer ?). > > I am applied this series on top of master and tested on nautilus. > Streaming seems fine to me, although it was appearing more choppy than > usual. > > Now that I see, entire nautilus seems laggy to respond in general. One > "Chrome" process hogging 50% CPU, seems like a transient issue. > > So, streaming seems fine on multiple runs, with multiple resolutions > selections (from the camera-app menu), however, requesting MJPEG > (shutter-click) results in a segfault. Thanks for testing ! > The stack trace is below: > > [0:24:11.415660787] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750235824 with 2 streams > [0:24:11.415858538] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous > [0:24:11.415905500] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) > [0:24:11.415943620] [10635] DEBUG HAL camera_device.cpp:966 '\_SB_.PCI0.I2C2.CAM0': 1 - (4160x3104)[0x00000021] -> (4160x3104)[NV12] (mapped) > [0:24:11.416461639] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 3 > [0:24:11.428903141] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video0[37:cap]: Dequeuing buffer 2 > [0:24:11.429095316] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature estimated: 6644 > [0:24:11.429563781] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 193 started > [0:24:11.429627478] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 193 > [0:24:11.429683048] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video6[25:cap]: Queueing buffer 1 > [0:24:11.436615191] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video5[27:out]: Queueing buffer 2 > [0:24:11.436718629] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video8[28:cap]: Queueing buffer 2 > [0:24:11.436799420] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video4[24:out]: Queueing buffer 2 > [0:24:11.457388821] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 1 > [0:24:11.457948159] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 0 > [0:24:11.458257692] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video5[27:out]: Dequeuing buffer 2 > [0:24:11.458525315] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 1 > [0:24:11.458968848] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > [0:24:11.459005634] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > [0:24:11.459082715] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16864 and for blue: 1.14863 > [0:24:11.459325998] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 
269 at index 194 > [0:24:11.459562485] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 194 > [0:24:11.459815347] [10629] DEBUG Request request.cpp:291 Request(129:C:0/1:140736750229136) > [0:24:11.460390126] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750229136 completed with 1 streams > [0:24:11.461855854] [10635] DEBUG Request request.cpp:94 Created request - cookie: 140736750230064 > [0:24:11.461939858] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750230064 with 1 streams > [0:24:11.462057435] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous > [0:24:11.462098906] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) > [0:24:11.463164722] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 194 started > [0:24:11.463730946] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 193 > [0:24:11.464033686] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 194 > [0:24:11.464329869] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 0 > [0:24:11.495739092] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 195 started > [0:24:11.496489311] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 194 > [0:24:11.496737385] [10629] DEBUG DelayedControls delayed_controls.cpp:285 Queue is empty, auto queue no-op. > [0:24:11.497044311] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video0[37:cap]: Dequeuing buffer 3 > [0:24:11.497335155] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature estimated: 6658 > [0:24:11.497528845] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video6[25:cap]: Queueing buffer 2 > [0:24:11.503589322] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video5[27:out]: Queueing buffer 3 > [0:24:11.504027344] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video8[28:cap]: Queueing buffer 3 > [0:24:11.504287330] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video4[24:out]: Queueing buffer 3 > [0:24:11.504712501] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 2 > [0:24:11.505005096] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 1 > [0:24:11.505260331] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video5[27:out]: Dequeuing buffer 3 > [0:24:11.505506837] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 2 > [0:24:11.505940926] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > [0:24:11.505976974] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > [0:24:11.506057427] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16797 and for blue: 1.14978 > [0:24:11.506297848] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 196 > [0:24:11.506547021] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 196 > [0:24:11.506796456] [10629] DEBUG Request request.cpp:291 Request(130:C:0/1:140736750225984) > [0:24:11.507374522] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750225984 completed with 1 streams > [0:24:11.509426987] [10635] DEBUG Request request.cpp:94 Created request - cookie: 140736750227664 > [0:24:11.509507039] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing 
request 140736750227664 with 1 streams > [0:24:11.509630706] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous > [0:24:11.509675200] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) > [0:24:11.511309605] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 1 > [0:24:11.528984601] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 196 started > [0:24:11.529446048] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 196 > [0:24:11.548431632] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 3 > [0:24:11.548993830] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 2 > [0:24:11.549320802] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 3 > [0:24:11.549807736] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > [0:24:11.549846777] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > [0:24:11.549927350] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16841 and for blue: 1.14912 > [0:24:11.550170924] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 197 > [0:24:11.550405629] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 197 > [0:24:11.550646460] [10629] DEBUG Request request.cpp:291 Request(131:C:0/1:140736750235824) > [0:24:11.551245104] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750235824 completed with 2 streams > [0:24:11.562307680] [10629] DEBUG EXIF exif.cpp:522 Created EXIF instance (536 bytes) > [0:24:11.634800113] [10629] DEBUG JPEG encoder_libjpeg.cpp:220 JPEG Encode Starting:4160x3104 > > Thread 6 "CameraModuleThr" received signal SIGSEGV, Segmentation fault. > [Switching to Thread 0x7fffe7fff640 (LWP 10629)] > 0x00007ffff597ead5 in EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so Not nice :-S Could you please print frame.size() here ? 
> (gdb) bt > #0Â 0x00007ffff597ead5 in EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so > #1Â 0x00007ffff597eda0 in EncoderLibJpeg::encode(libcamera::Span<unsigned char const, 18446744073709551615ul>, libcamera::Span<unsigned char, 18446744073709551615ul>, libcamera::Span<unsigned char const, 18446744073709551615ul>, unsigned int) () from /usr/lib64/camera_hal/libcamera-hal.so > #2Â 0x00007ffff597ec53 in EncoderLibJpeg::encode(libcamera::FrameBuffer const&, libcamera::Span<unsigned char, 18446744073709551615ul>, libcamera::Span<unsigned char const, 18446744073709551615ul>, unsigned int) () from /usr/lib64/camera_hal/libcamera-hal.so > #3Â 0x00007ffff5981871 in PostProcessorJpeg::process(libcamera::FrameBuffer const&, CameraBuffer*, CameraMetadata const&, CameraMetadata*) () from /usr/lib64/camera_hal/libcamera-hal.so > #4Â 0x00007ffff597dd8d in CameraStream::process(libcamera::FrameBuffer const&, native_handle const*, CameraMetadata const&, CameraMetadata*) () from /usr/lib64/camera_hal/libcamera-hal.so > #5Â 0x00007ffff59717b9 in CameraDevice::requestComplete(libcamera::Request*) () from /usr/lib64/camera_hal/libcamera-hal.so > #6Â 0x00007ffff5979a7c in libcamera::BoundMethodMember<CameraDevice, void, libcamera::Request*>::activate(libcamera::Request*, bool) () from /usr/lib64/camera_hal/libcamera-hal.so > #7Â 0x00007ffff58f9051 in libcamera::Signal<libcamera::IPCMessage const&>::emit(libcamera::IPCMessage const&) () from /usr/lib64/libcamera.so > #8Â 0x00007ffff58f8fe8 in libcamera::Camera::requestComplete(libcamera::Request*) () from /usr/lib64/libcamera.so > #9Â 0x00007ffff591f28a in libcamera::PipelineHandler::completeRequest(libcamera::Request*) () from /usr/lib64/libcamera.so > #10 0x00007ffff5935cef in libcamera::IPU3CameraData::queueFrameAction(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so > #11 0x00007ffff58f0b02 in libcamera::BoundMethodMember<libcamera::ipa::ipu3::IPAProxyIPU3, void, unsigned int, libcamera::ipa::ipu3::IPU3Action const&>::activate(unsigned int, libcamera::ipa::ipu3::IPU3Action const&, bool) () from /usr/lib64/libcamera.so > #12 0x00007ffff58edf81 in libcamera::Signal<unsigned int, libcamera::ipa::ipu3::IPU3Action const&>::emit(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so > #13 0x00007ffff58eb5b4 in libcamera::ipa::ipu3::IPAProxyIPU3::queueFrameActionThread(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so > #14 0x00007ffff587caed in libcamera::Object::message(libcamera::Message*) () from /usr/lib64/libcamera-base.so > #15 0x00007ffff587e00b in libcamera::Thread::dispatchMessages(libcamera::Message::Type) () from /usr/lib64/libcamera-base.so > #16 0x00007ffff5876925 in libcamera::EventDispatcherPoll::processEvents() () from /usr/lib64/libcamera-base.so > #17 0x00007ffff587d6e2 in libcamera::Thread::exec() () from /usr/lib64/libcamera-base.so > #18 0x00007ffff58fb582 in libcamera::CameraManager::Private::run() () from /usr/lib64/libcamera.so > #19 0x00007ffff587e43c in ?? () from /usr/lib64/libcamera-base.so > #20 0x0000000000000000 in ?? () > > > The most important change is in patches 13/27 and 14/27, which translate > > between V4L2 buffers and libcamera FrameBuffer to handle the case where > > a multi-planar frame buffer is used with the V4L2 single-planar API. 
> > It's working more or less by chance at the moment (except in qcam where > > it's broken, and possibly in other places I haven't tested). Patches > > 01/27 to 12/27 are cleanups and additions to prepare for the work in > > V4L2VideoDevice, and patch 15/27 is a small cleanup on top. Patches > > 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. > > > > Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an > > issue in the Android HAL. Worth being noted is patch 19/27 that > > introduces an Image class shared by cam and qcam. The class duplicates > > the MappedFrameBuffer implementation private to libcamera. I've tried to > > rework MappedFrameBuffer into something I would be happy to see in the > > public API, but failed to do so in a reasonable amount of time, and I > > didn't want to delay this important regression fix. > > > > This series doesn't break any unit test, as vimc doesn't support NV12. > > Addition of NV12 support to the vimc kernel driver would be very nice, > > in order to test multi-planar support in our unit tests. Volunteers are > > welcome ;-) > > > > Laurent Pinchart (27): > > libcamera: base: utils: Use size_t for index in utils::enumerate() > > libcamera: file_descriptor: Add a function to retrieve the inode > > libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() > > libcamera: Use V4L2PixelFormat::fromPixelFormat() > > libcamera: formats: Move plane info structure to PixelFormatInfo > > libcamera: formats: Add planeSize() helpers to PixelFormatInfo > > libcamera: formats: Support V4L2 non-contiguous formats > > libcamera: framebuffer: Move planes check to constructor > > libcamera: framebuffer: Add a function to check if planes are > > contiguous > > libcamera: v4l2_videodevice: Cache PixelFormatInfo > > libcamera: v4l2_videodevice: Document plane handling in createBuffer() > > libcamera: v4l2_videodevice: Take stride into account to compute > > offsets > > libcamera: v4l2_videodevice: Coalesce planes when queuing buffer > > libcamera: v4l2_videodevice: Split planes when dequeuing buffer > > libcamera: v4l2_videodevice: Use utils::enumerate() > > libcamera: framebuffer: Allocate metadata planes at construction time > > libcamera: framebuffer: Prevent modifying the number of metadata > > planes > > android: camera_device: Don't assume all planes use the same fd > > cam: Add Image class > > cam: file_sink: Use Image class to access pixel data > > cam: drm: Support per-plane stride values > > cam: drm: Set per-plane offsets when creating DRM frame buffer > > cam: drm: Avoid importing the same dmabuf multiple times > > qcam: Print bytesused for all planes > > qcam: Use Image class to access pixel data > > qcam: viewfinder_gl: Support multi-planar buffers > > qcam: viewfinder_qt: Support multi-planar buffers > > > > include/libcamera/base/utils.h | 4 +- > > include/libcamera/file_descriptor.h | 3 + > > include/libcamera/framebuffer.h | 19 +- > > include/libcamera/internal/formats.h | 22 +- > > include/libcamera/internal/framebuffer.h | 2 + > > include/libcamera/internal/v4l2_pixelformat.h | 2 +- > > include/libcamera/internal/v4l2_videodevice.h | 3 +- > > src/android/camera_device.cpp | 25 +- > > src/android/mm/generic_camera_buffer.cpp | 11 +- > > src/android/yuv/post_processor_yuv.cpp | 10 +- > > src/cam/camera_session.cpp | 4 +- > > src/cam/drm.cpp | 38 +- > > src/cam/drm.h | 7 +- > > src/cam/file_sink.cpp | 44 +-- > > src/cam/file_sink.h | 6 +- > > src/cam/image.cpp | 107 +++++ > > src/cam/image.h | 52 +++ > > 
src/cam/kms_sink.cpp | 28 +- > > src/cam/meson.build | 1 + > > src/libcamera/file_descriptor.cpp | 26 ++ > > src/libcamera/formats.cpp | 373 ++++++++++++++---- > > src/libcamera/framebuffer.cpp | 57 ++- > > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- > > src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- > > .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- > > src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- > > src/libcamera/pipeline/simple/converter.cpp | 8 +- > > src/libcamera/pipeline/simple/simple.cpp | 4 +- > > src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- > > src/libcamera/pipeline/vimc/vimc.cpp | 8 +- > > src/libcamera/v4l2_pixelformat.cpp | 11 +- > > src/libcamera/v4l2_videodevice.cpp | 196 ++++++--- > > src/qcam/format_converter.cpp | 18 +- > > src/qcam/format_converter.h | 9 +- > > src/qcam/main_window.cpp | 38 +- > > src/qcam/main_window.h | 4 +- > > src/qcam/meson.build | 1 + > > src/qcam/viewfinder.h | 6 +- > > src/qcam/viewfinder_gl.cpp | 45 +-- > > src/qcam/viewfinder_gl.h | 4 +- > > src/qcam/viewfinder_qt.cpp | 20 +- > > src/qcam/viewfinder_qt.h | 2 +- > > src/v4l2/v4l2_camera_proxy.cpp | 11 +- > > test/libtest/buffer_source.cpp | 3 +- > > test/utils.cpp | 10 +- > > 45 files changed, 911 insertions(+), 357 deletions(-) > > create mode 100644 src/cam/image.cpp > > create mode 100644 src/cam/image.h
Hi Laurent, thank you for the patch. On Mon, Sep 6, 2021 at 2:40 PM Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> wrote: > > Hi Laurent, > > On 06/09/2021 04:00, Laurent Pinchart wrote: > > Replace manual looked for V4L2 pixel format in the PixelFormatInfo with > > the V4L2PixelFormat::fromPixelFormat() helper function. This prepares > > for multi-planar support that will modify how V4L2 pixel formats are > > stored in PixelFormatInfo. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > --- > > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +--- > > src/v4l2/v4l2_camera_proxy.cpp | 9 +++------ > > 2 files changed, 4 insertions(+), 9 deletions(-) > > > > diff --git a/src/libcamera/pipeline/ipu3/cio2.cpp b/src/libcamera/pipeline/ipu3/cio2.cpp > > index 9cedcb5b2879..dc62ab197acb 100644 > > --- a/src/libcamera/pipeline/ipu3/cio2.cpp > > +++ b/src/libcamera/pipeline/ipu3/cio2.cpp > > @@ -203,9 +203,7 @@ int CIO2Device::configure(const Size &size, V4L2DeviceFormat *outputFormat) > > if (itInfo == mbusCodesToPixelFormat.end()) > > return -EINVAL; > > > > - const PixelFormatInfo &info = PixelFormatInfo::info(itInfo->second); > > - > > - outputFormat->fourcc = info.v4l2Format; > > + outputFormat->fourcc = V4L2PixelFormat::fromPixelFormat(itInfo->second); > > outputFormat->size = sensorFormat.size; > > outputFormat->planesCount = 1; > > > > diff --git a/src/v4l2/v4l2_camera_proxy.cpp b/src/v4l2/v4l2_camera_proxy.cpp > > index 07b1a90aa32f..d926a7b77083 100644 > > --- a/src/v4l2/v4l2_camera_proxy.cpp > > +++ b/src/v4l2/v4l2_camera_proxy.cpp > > @@ -164,12 +164,11 @@ bool V4L2CameraProxy::validateMemoryType(uint32_t memory) > > > > void V4L2CameraProxy::setFmtFromConfig(const StreamConfiguration &streamConfig) > > { > > - const PixelFormatInfo &info = PixelFormatInfo::info(streamConfig.pixelFormat); > > const Size &size = streamConfig.size; > > > > v4l2PixFormat_.width = size.width; > > v4l2PixFormat_.height = size.height; > > - v4l2PixFormat_.pixelformat = info.v4l2Format; > > + v4l2PixFormat_.pixelformat = V4L2PixelFormat::fromPixelFormat(streamConfig.pixelFormat); > > v4l2PixFormat_.field = V4L2_FIELD_NONE; > > v4l2PixFormat_.bytesperline = streamConfig.stride; > > v4l2PixFormat_.sizeimage = streamConfig.frameSize; > > @@ -276,7 +275,7 @@ int V4L2CameraProxy::vidioc_enum_fmt(V4L2CameraFile *file, struct v4l2_fmtdesc * > > /* \todo Add map from format to description. */ > > utils::strlcpy(reinterpret_cast<char *>(arg->description), > > "Video Format Description", sizeof(arg->description)); > > - arg->pixelformat = PixelFormatInfo::info(format).v4l2Format; > > + arg->pixelformat = V4L2PixelFormat::fromPixelFormat(format); > > > > memset(arg->reserved, 0, sizeof(arg->reserved)); > > > > @@ -311,11 +310,9 @@ int V4L2CameraProxy::tryFormat(struct v4l2_format *arg) > > return -EINVAL; > > } > > > > - const PixelFormatInfo &info = PixelFormatInfo::info(config.pixelFormat); > > - > > arg->fmt.pix.width = config.size.width; > > arg->fmt.pix.height = config.size.height; > > - arg->fmt.pix.pixelformat = info.v4l2Format; > > + arg->fmt.pix.pixelformat = V4L2PixelFormat::fromPixelFormat(config.pixelFormat); > > arg->fmt.pix.field = V4L2_FIELD_NONE; > > arg->fmt.pix.bytesperline = config.stride; > > arg->fmt.pix.sizeimage = config.frameSize; > >
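[Editor's note] A small usage sketch of the helper being adopted in this patch, written as in-tree code since the header is internal. Hedged: the single-argument call (contiguous variant) matches the diff above; the helper function name here is made up for illustration.

```cpp
#include <libcamera/formats.h>

#include "libcamera/internal/v4l2_pixelformat.h"

/*
 * Map a libcamera PixelFormat to its V4L2 counterpart without going through
 * PixelFormatInfo. Expected to yield V4L2_PIX_FMT_NV12 for formats::NV12.
 */
static libcamera::V4L2PixelFormat nv12V4L2Format()
{
	return libcamera::V4L2PixelFormat::fromPixelFormat(libcamera::formats::NV12);
}
```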
Hi Laurent, On Mon, Sep 6, 2021 at 8:41 PM Laurent Pinchart <laurent.pinchart@ideasonboard.com> wrote: > > Hi Hiro, > > On Mon, Sep 06, 2021 at 08:37:26PM +0900, Hirokazu Honda wrote: > > On Mon, Sep 6, 2021 at 6:04 PM <paul.elder@ideasonboard.com> wrote: > > > On Mon, Sep 06, 2021 at 05:00:35AM +0300, Laurent Pinchart wrote: > > > > The inode is useful to check if two file descriptors refer to the same > > > > file. Add a function to retrieve it. > > > > > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > > > Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com> > > > > > > Reviewed-by: Paul Elder <paul.elder@ideasonboard.com> > > > > > > > --- > > > > Changes since v1: > > > > > > > > - Use isValid() instead of open-coding it > > > > - Print a message on error > > > > --- > > > > include/libcamera/file_descriptor.h | 3 +++ > > > > src/libcamera/file_descriptor.cpp | 26 ++++++++++++++++++++++++++ > > > > 2 files changed, 29 insertions(+) > > > > > > > > diff --git a/include/libcamera/file_descriptor.h b/include/libcamera/file_descriptor.h > > > > index d514aac7697b..988f9b7a3d25 100644 > > > > --- a/include/libcamera/file_descriptor.h > > > > +++ b/include/libcamera/file_descriptor.h > > > > @@ -8,6 +8,7 @@ > > > > #define __LIBCAMERA_FILE_DESCRIPTOR_H__ > > > > > > > > #include <memory> > > > > +#include <sys/types.h> > > > > > > > > namespace libcamera { > > > > > > > > @@ -27,6 +28,8 @@ public: > > > > int fd() const { return fd_ ? fd_->fd() : -1; } > > > > FileDescriptor dup() const; > > > > > > > > + ino_t inode() const; > > > > + > > > > private: > > > > class Descriptor > > > > { > > > > diff --git a/src/libcamera/file_descriptor.cpp b/src/libcamera/file_descriptor.cpp > > > > index 9f9ebc81f738..0409c3e1758c 100644 > > > > --- a/src/libcamera/file_descriptor.cpp > > > > +++ b/src/libcamera/file_descriptor.cpp > > > > @@ -8,6 +8,8 @@ > > > > #include <libcamera/file_descriptor.h> > > > > > > > > #include <string.h> > > > > +#include <sys/stat.h> > > > > +#include <sys/types.h> > > > > #include <unistd.h> > > > > #include <utility> > > > > > > > > @@ -221,6 +223,30 @@ FileDescriptor FileDescriptor::dup() const > > > > return FileDescriptor(fd()); > > > > } > > > > > > > > +/** > > > > + * \brief Retrieve the file descriptor inode > > > > + * > > > > + * \todo Should this move to the File class ? > > > > + * > > > > + * \return The file descriptor inode on success, or 0 on error > > > > + */ > > > > +ino_t FileDescriptor::inode() const > > > > +{ > > > > + if (!isValid()) > > > > + return 0; > > > > + > > > > + struct stat st; > > > > + int ret = fstat(fd_->fd(), &st); > > > > + if (ret < 0) { > > > > + ret = -errno; > > > > + LOG(FileDescriptor, Fatal) > > > > + << "Failed to fstat() fd: " << strerror(-ret); > > > > Setting errno to ret is unnecessary? > > > > if (ret < 0) { > > LOG(FileDescriptor, Fatal) << "Failed to fstat() fd: " << stderr(errno); > > return 0; > > } > > The LOG() macros constructs a LogMessage object, and it may change errno > before strerror() is called. The same is possibly true of the > operator<<() calls, or other function calls in the log line. Maybe it > can't happen in this specific case, but we always assign ret = -errno to > be safe. > Ah, I didn't notice it. Thanks for explaining. 
-Hiro > > Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > > > > > + return 0; > > > > + } > > > > + > > > > + return st.st_ino; > > > > +} > > > > + > > > > FileDescriptor::Descriptor::Descriptor(int fd, bool duplicate) > > > > { > > > > if (!duplicate) { > > -- > Regards, > > Laurent Pinchart
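[Editor's note] The point about errno is worth a standalone illustration (a generic sketch, not libcamera code): copy errno into a local before building the log message, because the code that runs while formatting the message may itself touch errno.

```cpp
#include <errno.h>
#include <iostream>
#include <string.h>
#include <sys/stat.h>

static int checkedFstat(int fd, struct stat *st)
{
	int ret = fstat(fd, st);
	if (ret < 0) {
		ret = -errno;	/* Capture errno before anything else runs. */
		/* Formatting the message may clobber errno, so use the saved copy. */
		std::cerr << "fstat() failed: " << strerror(-ret) << std::endl;
		return ret;
	}

	return 0;
}
```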
Hi Laurent, On 9/6/21 5:54 PM, Laurent Pinchart wrote: > Hi Umang, > > On Mon, Sep 06, 2021 at 03:38:04PM +0530, Umang Jain wrote: >> On 9/6/21 7:30 AM, Laurent Pinchart wrote: >>> Hello everybody, >>> >>> This patch series started as an investigation of a qcam failure with >>> IPU3 after the merge of the FrameBuffer offset support. While a hack in >>> qcam would be possible, I decided to instead address the core issue and >>> fix it in V4L2VideoDevice. >>> >>> Compared to v1, the series now includes fixes for cam and qcam in >>> addition to the changes needed in the libcamera core. They have been >>> tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. >>> >>> The GStreamer element seems to work fine without any change required. >>> The V4L2 compatibility layer is still broken, and I haven't tested the >>> Android HAL yet (any volunteer ?). >> I am applied this series on top of master and tested on nautilus. >> Streaming seems fine to me, although it was appearing more choppy than >> usual. >> >> Now that I see, entire nautilus seems laggy to respond in general. One >> "Chrome" process hogging 50% CPU, seems like a transient issue. >> >> So, streaming seems fine on multiple runs, with multiple resolutions >> selections (from the camera-app menu), however, requesting MJPEG >> (shutter-click) results in a segfault. > Thanks for testing ! > >> The stack trace is below: >> >> [0:24:11.415660787] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750235824 with 2 streams >> [0:24:11.415858538] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous >> [0:24:11.415905500] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) >> [0:24:11.415943620] [10635] DEBUG HAL camera_device.cpp:966 '\_SB_.PCI0.I2C2.CAM0': 1 - (4160x3104)[0x00000021] -> (4160x3104)[NV12] (mapped) >> [0:24:11.416461639] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 3 >> [0:24:11.428903141] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video0[37:cap]: Dequeuing buffer 2 >> [0:24:11.429095316] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature estimated: 6644 >> [0:24:11.429563781] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 193 started >> [0:24:11.429627478] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 193 >> [0:24:11.429683048] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video6[25:cap]: Queueing buffer 1 >> [0:24:11.436615191] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video5[27:out]: Queueing buffer 2 >> [0:24:11.436718629] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video8[28:cap]: Queueing buffer 2 >> [0:24:11.436799420] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video4[24:out]: Queueing buffer 2 >> [0:24:11.457388821] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 1 >> [0:24:11.457948159] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 0 >> [0:24:11.458257692] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video5[27:out]: Dequeuing buffer 2 >> [0:24:11.458525315] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 1 >> [0:24:11.458968848] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 >> [0:24:11.459005634] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB >> [0:24:11.459082715] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16864 and for blue: 1.14863 >> 
[0:24:11.459325998] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 194 >> [0:24:11.459562485] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 194 >> [0:24:11.459815347] [10629] DEBUG Request request.cpp:291 Request(129:C:0/1:140736750229136) >> [0:24:11.460390126] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750229136 completed with 1 streams >> [0:24:11.461855854] [10635] DEBUG Request request.cpp:94 Created request - cookie: 140736750230064 >> [0:24:11.461939858] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750230064 with 1 streams >> [0:24:11.462057435] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous >> [0:24:11.462098906] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) >> [0:24:11.463164722] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 194 started >> [0:24:11.463730946] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 193 >> [0:24:11.464033686] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 194 >> [0:24:11.464329869] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 0 >> [0:24:11.495739092] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 195 started >> [0:24:11.496489311] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 194 >> [0:24:11.496737385] [10629] DEBUG DelayedControls delayed_controls.cpp:285 Queue is empty, auto queue no-op. >> [0:24:11.497044311] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video0[37:cap]: Dequeuing buffer 3 >> [0:24:11.497335155] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature estimated: 6658 >> [0:24:11.497528845] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video6[25:cap]: Queueing buffer 2 >> [0:24:11.503589322] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video5[27:out]: Queueing buffer 3 >> [0:24:11.504027344] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video8[28:cap]: Queueing buffer 3 >> [0:24:11.504287330] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video4[24:out]: Queueing buffer 3 >> [0:24:11.504712501] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 2 >> [0:24:11.505005096] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 1 >> [0:24:11.505260331] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video5[27:out]: Dequeuing buffer 3 >> [0:24:11.505506837] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 2 >> [0:24:11.505940926] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 >> [0:24:11.505976974] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB >> [0:24:11.506057427] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16797 and for blue: 1.14978 >> [0:24:11.506297848] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 196 >> [0:24:11.506547021] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 196 >> [0:24:11.506796456] [10629] DEBUG Request request.cpp:291 Request(130:C:0/1:140736750225984) >> [0:24:11.507374522] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750225984 completed with 1 streams >> [0:24:11.509426987] [10635] DEBUG Request request.cpp:94 Created 
request - cookie: 140736750227664 >> [0:24:11.509507039] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750227664 with 1 streams >> [0:24:11.509630706] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous >> [0:24:11.509675200] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) >> [0:24:11.511309605] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 1 >> [0:24:11.528984601] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 196 started >> [0:24:11.529446048] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 196 >> [0:24:11.548431632] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 3 >> [0:24:11.548993830] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 2 >> [0:24:11.549320802] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 3 >> [0:24:11.549807736] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 >> [0:24:11.549846777] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB >> [0:24:11.549927350] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16841 and for blue: 1.14912 >> [0:24:11.550170924] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 197 >> [0:24:11.550405629] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 197 >> [0:24:11.550646460] [10629] DEBUG Request request.cpp:291 Request(131:C:0/1:140736750235824) >> [0:24:11.551245104] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750235824 completed with 2 streams >> [0:24:11.562307680] [10629] DEBUG EXIF exif.cpp:522 Created EXIF instance (536 bytes) >> [0:24:11.634800113] [10629] DEBUG JPEG encoder_libjpeg.cpp:220 JPEG Encode Starting:4160x3104 >> >> Thread 6 "CameraModuleThr" received signal SIGSEGV, Segmentation fault. >> [Switching to Thread 0x7fffe7fff640 (LWP 10629)] >> 0x00007ffff597ead5 in EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so > Not nice :-S Could you please print frame.size() here ? 
[3:11:40.303057525] [25328] DEBUG EXIF exif.cpp:522 Created EXIF instance (536 bytes) [3:11:40.368109082] [25328] DEBUG JPEG encoder_libjpeg.cpp:222 JPEG Encode Starting:4160x3104 [3:11:40.368158138] [25328] DEBUG JPEG encoder_libjpeg.cpp:128 Frame size : 12912640 Segmentation fault (core dumped) > >> (gdb) bt >> #0Â 0x00007ffff597ead5 in EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so >> #1Â 0x00007ffff597eda0 in EncoderLibJpeg::encode(libcamera::Span<unsigned char const, 18446744073709551615ul>, libcamera::Span<unsigned char, 18446744073709551615ul>, libcamera::Span<unsigned char const, 18446744073709551615ul>, unsigned int) () from /usr/lib64/camera_hal/libcamera-hal.so >> #2Â 0x00007ffff597ec53 in EncoderLibJpeg::encode(libcamera::FrameBuffer const&, libcamera::Span<unsigned char, 18446744073709551615ul>, libcamera::Span<unsigned char const, 18446744073709551615ul>, unsigned int) () from /usr/lib64/camera_hal/libcamera-hal.so >> #3Â 0x00007ffff5981871 in PostProcessorJpeg::process(libcamera::FrameBuffer const&, CameraBuffer*, CameraMetadata const&, CameraMetadata*) () from /usr/lib64/camera_hal/libcamera-hal.so >> #4Â 0x00007ffff597dd8d in CameraStream::process(libcamera::FrameBuffer const&, native_handle const*, CameraMetadata const&, CameraMetadata*) () from /usr/lib64/camera_hal/libcamera-hal.so >> #5Â 0x00007ffff59717b9 in CameraDevice::requestComplete(libcamera::Request*) () from /usr/lib64/camera_hal/libcamera-hal.so >> #6Â 0x00007ffff5979a7c in libcamera::BoundMethodMember<CameraDevice, void, libcamera::Request*>::activate(libcamera::Request*, bool) () from /usr/lib64/camera_hal/libcamera-hal.so >> #7Â 0x00007ffff58f9051 in libcamera::Signal<libcamera::IPCMessage const&>::emit(libcamera::IPCMessage const&) () from /usr/lib64/libcamera.so >> #8Â 0x00007ffff58f8fe8 in libcamera::Camera::requestComplete(libcamera::Request*) () from /usr/lib64/libcamera.so >> #9Â 0x00007ffff591f28a in libcamera::PipelineHandler::completeRequest(libcamera::Request*) () from /usr/lib64/libcamera.so >> #10 0x00007ffff5935cef in libcamera::IPU3CameraData::queueFrameAction(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so >> #11 0x00007ffff58f0b02 in libcamera::BoundMethodMember<libcamera::ipa::ipu3::IPAProxyIPU3, void, unsigned int, libcamera::ipa::ipu3::IPU3Action const&>::activate(unsigned int, libcamera::ipa::ipu3::IPU3Action const&, bool) () from /usr/lib64/libcamera.so >> #12 0x00007ffff58edf81 in libcamera::Signal<unsigned int, libcamera::ipa::ipu3::IPU3Action const&>::emit(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so >> #13 0x00007ffff58eb5b4 in libcamera::ipa::ipu3::IPAProxyIPU3::queueFrameActionThread(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so >> #14 0x00007ffff587caed in libcamera::Object::message(libcamera::Message*) () from /usr/lib64/libcamera-base.so >> #15 0x00007ffff587e00b in libcamera::Thread::dispatchMessages(libcamera::Message::Type) () from /usr/lib64/libcamera-base.so >> #16 0x00007ffff5876925 in libcamera::EventDispatcherPoll::processEvents() () from /usr/lib64/libcamera-base.so >> #17 0x00007ffff587d6e2 in libcamera::Thread::exec() () from /usr/lib64/libcamera-base.so >> #18 0x00007ffff58fb582 in libcamera::CameraManager::Private::run() () from /usr/lib64/libcamera.so >> #19 0x00007ffff587e43c in ?? () from /usr/lib64/libcamera-base.so >> #20 0x0000000000000000 in ?? 
() >> >>> The most important change is in patches 13/27 and 14/27, which translate >>> between V4L2 buffers and libcamera FrameBuffer to handle the case where >>> a multi-planar frame buffer is used with the V4L2 single-planar API. >>> It's working more or less by chance at the moment (except in qcam where >>> it's broken, and possibly in other places I haven't tested). Patches >>> 01/27 to 12/27 are cleanups and additions to prepare for the work in >>> V4L2VideoDevice, and patch 15/27 is a small cleanup on top. Patches >>> 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. >>> >>> Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an >>> issue in the Android HAL. Worth being noted is patch 19/27 that >>> introduces an Image class shared by cam and qcam. The class duplicates >>> the MappedFrameBuffer implementation private to libcamera. I've tried to >>> rework MappedFrameBuffer into something I would be happy to see in the >>> public API, but failed to do so in a reasonable amount of time, and I >>> didn't want to delay this important regression fix. >>> >>> This series doesn't break any unit test, as vimc doesn't support NV12. >>> Addition of NV12 support to the vimc kernel driver would be very nice, >>> in order to test multi-planar support in our unit tests. Volunteers are >>> welcome ;-) >>> >>> Laurent Pinchart (27): >>> libcamera: base: utils: Use size_t for index in utils::enumerate() >>> libcamera: file_descriptor: Add a function to retrieve the inode >>> libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() >>> libcamera: Use V4L2PixelFormat::fromPixelFormat() >>> libcamera: formats: Move plane info structure to PixelFormatInfo >>> libcamera: formats: Add planeSize() helpers to PixelFormatInfo >>> libcamera: formats: Support V4L2 non-contiguous formats >>> libcamera: framebuffer: Move planes check to constructor >>> libcamera: framebuffer: Add a function to check if planes are >>> contiguous >>> libcamera: v4l2_videodevice: Cache PixelFormatInfo >>> libcamera: v4l2_videodevice: Document plane handling in createBuffer() >>> libcamera: v4l2_videodevice: Take stride into account to compute >>> offsets >>> libcamera: v4l2_videodevice: Coalesce planes when queuing buffer >>> libcamera: v4l2_videodevice: Split planes when dequeuing buffer >>> libcamera: v4l2_videodevice: Use utils::enumerate() >>> libcamera: framebuffer: Allocate metadata planes at construction time >>> libcamera: framebuffer: Prevent modifying the number of metadata >>> planes >>> android: camera_device: Don't assume all planes use the same fd >>> cam: Add Image class >>> cam: file_sink: Use Image class to access pixel data >>> cam: drm: Support per-plane stride values >>> cam: drm: Set per-plane offsets when creating DRM frame buffer >>> cam: drm: Avoid importing the same dmabuf multiple times >>> qcam: Print bytesused for all planes >>> qcam: Use Image class to access pixel data >>> qcam: viewfinder_gl: Support multi-planar buffers >>> qcam: viewfinder_qt: Support multi-planar buffers >>> >>> include/libcamera/base/utils.h | 4 +- >>> include/libcamera/file_descriptor.h | 3 + >>> include/libcamera/framebuffer.h | 19 +- >>> include/libcamera/internal/formats.h | 22 +- >>> include/libcamera/internal/framebuffer.h | 2 + >>> include/libcamera/internal/v4l2_pixelformat.h | 2 +- >>> include/libcamera/internal/v4l2_videodevice.h | 3 +- >>> src/android/camera_device.cpp | 25 +- >>> src/android/mm/generic_camera_buffer.cpp | 11 +- >>> src/android/yuv/post_processor_yuv.cpp | 10 +- >>> 
src/cam/camera_session.cpp | 4 +- >>> src/cam/drm.cpp | 38 +- >>> src/cam/drm.h | 7 +- >>> src/cam/file_sink.cpp | 44 +-- >>> src/cam/file_sink.h | 6 +- >>> src/cam/image.cpp | 107 +++++ >>> src/cam/image.h | 52 +++ >>> src/cam/kms_sink.cpp | 28 +- >>> src/cam/meson.build | 1 + >>> src/libcamera/file_descriptor.cpp | 26 ++ >>> src/libcamera/formats.cpp | 373 ++++++++++++++---- >>> src/libcamera/framebuffer.cpp | 57 ++- >>> src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- >>> src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- >>> .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- >>> src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- >>> src/libcamera/pipeline/simple/converter.cpp | 8 +- >>> src/libcamera/pipeline/simple/simple.cpp | 4 +- >>> src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- >>> src/libcamera/pipeline/vimc/vimc.cpp | 8 +- >>> src/libcamera/v4l2_pixelformat.cpp | 11 +- >>> src/libcamera/v4l2_videodevice.cpp | 196 ++++++--- >>> src/qcam/format_converter.cpp | 18 +- >>> src/qcam/format_converter.h | 9 +- >>> src/qcam/main_window.cpp | 38 +- >>> src/qcam/main_window.h | 4 +- >>> src/qcam/meson.build | 1 + >>> src/qcam/viewfinder.h | 6 +- >>> src/qcam/viewfinder_gl.cpp | 45 +-- >>> src/qcam/viewfinder_gl.h | 4 +- >>> src/qcam/viewfinder_qt.cpp | 20 +- >>> src/qcam/viewfinder_qt.h | 2 +- >>> src/v4l2/v4l2_camera_proxy.cpp | 11 +- >>> test/libtest/buffer_source.cpp | 3 +- >>> test/utils.cpp | 10 +- >>> 45 files changed, 911 insertions(+), 357 deletions(-) >>> create mode 100644 src/cam/image.cpp >>> create mode 100644 src/cam/image.h
Hi Laurent, thank you for the patch. On Mon, Sep 6, 2021 at 4:13 PM Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> wrote: > > Hi Laurent, > > On 06/09/2021 04:00, Laurent Pinchart wrote: > > When creating FrameBuffer instances, the V4L2VideoDevice computes plane > > offsets using minimal stride for the format. This doesn't always produce > > a valid result when the device requires padding at the end of lines. Fix > > it by computing offsets using the stride reported by V4L2. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > --- > > src/libcamera/v4l2_videodevice.cpp | 16 +++++++++++++--- > > 1 file changed, 13 insertions(+), 3 deletions(-) > > > > diff --git a/src/libcamera/v4l2_videodevice.cpp b/src/libcamera/v4l2_videodevice.cpp > > index 88535f5a07c7..c6c9263c49e9 100644 > > --- a/src/libcamera/v4l2_videodevice.cpp > > +++ b/src/libcamera/v4l2_videodevice.cpp > > @@ -1354,11 +1354,21 @@ std::unique_ptr<FrameBuffer> V4L2VideoDevice::createBuffer(unsigned int index) > > size_t offset = 0; > > > > for (size_t i = 0; i < planes.size(); ++i) { > > + /* > > + * The stride is reported by V4L2 for the first plane > > + * only. Compute the stride of the other planes by > > + * taking the horizontal subsampling factor into > > + * account, which is equal to the bytesPerGroup ratio of > > + * the planes. > > + */ > > + unsigned int stride = format_.planes[0].bpl > > + * formatInfo_->planes[i].bytesPerGroup > > + / formatInfo_->planes[0].bytesPerGroup; > > + > > planes[i].fd = fd; > > planes[i].offset = offset; > > - > > - /* \todo Take the V4L2 stride into account */ > > - planes[i].length = formatInfo_->planeSize(format_.size, i); > > + planes[i].length = formatInfo_->planeSize(format_.size.height, > > + i, stride); > > offset += planes[i].length; > > } > > } > >
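To make the arithmetic in the hunk above easier to follow outside the diff context, here is a minimal sketch of the same computation as a standalone function. planeStride() is a hypothetical name, and the sketch assumes the PixelFormatInfo::planes[] and bytesPerGroup layout introduced earlier in this series.

#include "libcamera/internal/formats.h"

/*
 * V4L2 reports bytesperline for the first plane only. Derive the stride
 * of any other plane by scaling with the ratio of bytesPerGroup values,
 * which encodes the horizontal subsampling of that plane.
 */
static unsigned int planeStride(const libcamera::PixelFormatInfo &info,
				unsigned int firstPlaneBpl, unsigned int plane)
{
	return firstPlaneBpl * info.planes[plane].bytesPerGroup
	     / info.planes[0].bytesPerGroup;
}

For NV12, for instance, both planes share the same bytesPerGroup, so the chroma plane works out to the same stride as the luma plane.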
Hi Laurent, On Mon, Sep 6, 2021 at 11:01 AM Laurent Pinchart <laurent.pinchart@ideasonboard.com> wrote: > > When queueing a buffer to a V4L2VideoDevice, the number of planes in the > FrameBuffer may not match the number of V4L2 buffer planes if the > PixelFormat is multi-planar (has multiple colour planes) and the V4L2 > format is single-planar (has a single buffer plane). In this case, we > need to coalesce all FrameBuffer planes into a single V4L2 buffer plane. > Do so, and add validity checks to reject frame buffers that can't be > described using a single V4L2 buffer plane. > > This change prepares for proper multi-planar support, but isn't expected > to result in a change of behaviour with existing pipeline handlers, as > none of them queue an output buffer with multiple FrameBuffer planes or > use non-contiguous buffers for either capture or output. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com> > Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > --- > Changes since v1: > > - Make numV4l2Planes const > - Use format_.planesCount > --- > src/libcamera/v4l2_videodevice.cpp | 67 +++++++++++++++++++++++++----- > 1 file changed, 57 insertions(+), 10 deletions(-) > > diff --git a/src/libcamera/v4l2_videodevice.cpp b/src/libcamera/v4l2_videodevice.cpp > index c6c9263c49e9..625d5da40337 100644 > --- a/src/libcamera/v4l2_videodevice.cpp > +++ b/src/libcamera/v4l2_videodevice.cpp > @@ -22,10 +22,12 @@ > > #include <libcamera/base/event_notifier.h> > #include <libcamera/base/log.h> > +#include <libcamera/base/utils.h> > > #include <libcamera/file_descriptor.h> > > #include "libcamera/internal/formats.h" > +#include "libcamera/internal/framebuffer.h" > #include "libcamera/internal/media_device.h" > #include "libcamera/internal/media_object.h" > > @@ -1496,10 +1498,20 @@ int V4L2VideoDevice::queueBuffer(FrameBuffer *buffer) > > bool multiPlanar = V4L2_TYPE_IS_MULTIPLANAR(buf.type); > const std::vector<FrameBuffer::Plane> &planes = buffer->planes(); > + const unsigned int numV4l2Planes = format_.planesCount; > + > + /* > + * If the frame buffer has multiple planes and the V4L2 format requires > + * contiguous planes, ensure that's the case. > + */ > + if (planes.size() != numV4l2Planes && !buffer->_d()->isContiguous()) { > + LOG(V4L2, Error) << "Device format requires contiguous buffer"; > + return -EINVAL; > + } nit: shall we also check planes.size() >= numV4l2Planes? -Hiro > > if (buf.memory == V4L2_MEMORY_DMABUF) { > if (multiPlanar) { > - for (unsigned int p = 0; p < planes.size(); ++p) > + for (unsigned int p = 0; p < numV4l2Planes; ++p) > v4l2Planes[p].m.fd = planes[p].fd.fd(); > } else { > buf.m.fd = planes[0].fd.fd(); > @@ -1507,23 +1519,58 @@ int V4L2VideoDevice::queueBuffer(FrameBuffer *buffer) > } > > if (multiPlanar) { > - buf.length = planes.size(); > + buf.length = numV4l2Planes; > buf.m.planes = v4l2Planes; > } > > if (V4L2_TYPE_IS_OUTPUT(buf.type)) { > const FrameMetadata &metadata = buffer->metadata(); > > - if (multiPlanar) { > - unsigned int nplane = 0; > - for (const FrameMetadata::Plane &plane : metadata.planes) { > - v4l2Planes[nplane].bytesused = plane.bytesused; > - v4l2Planes[nplane].length = buffer->planes()[nplane].length; > - nplane++; > + if (numV4l2Planes != planes.size()) { > + /* > + * If we have a multi-planar buffer with a V4L2 > + * single-planar format, coalesce all planes. 
The length > + * and number of bytes used may only differ in the last > + * plane as any other situation can't be represented. > + */ > + unsigned int bytesused = 0; > + unsigned int length = 0; > + > + for (auto [i, plane] : utils::enumerate(planes)) { > + bytesused += metadata.planes[i].bytesused; > + length += plane.length; > + > + if (i != planes.size() - 1 && bytesused != length) { > + LOG(V4L2, Error) > + << "Holes in multi-planar buffer not supported"; > + return -EINVAL; > + } > + } > + > + if (multiPlanar) { > + v4l2Planes[0].bytesused = bytesused; > + v4l2Planes[0].length = length; > + } else { > + buf.bytesused = bytesused; > + buf.length = length; > + } > + } else if (multiPlanar) { > + /* > + * If we use the multi-planar API, fill in the planes. > + * The number of planes in the frame buffer and in the > + * V4L2 buffer is guaranteed to be equal at this point. > + */ > + for (auto [i, plane] : utils::enumerate(planes)) { > + v4l2Planes[i].bytesused = metadata.planes[i].bytesused; > + v4l2Planes[i].length = plane.length; > } > } else { > - if (metadata.planes.size()) > - buf.bytesused = metadata.planes[0].bytesused; > + /* > + * Single-planar API with a single plane in the buffer > + * is trivial to handle. > + */ > + buf.bytesused = metadata.planes[0].bytesused; > + buf.length = planes[0].length; > } > > buf.sequence = metadata.sequence; > -- > Regards, > > Laurent Pinchart >
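The "holes" check above is the key invariant of this patch. Detached from queueBuffer(), it boils down to the following sketch; canCoalesce() and PlaneDesc are illustrative names only, not part of the patch.

#include <cstddef>
#include <vector>

struct PlaneDesc {
	unsigned int length;    /* plane size within the dmabuf */
	unsigned int bytesused; /* payload size from FrameMetadata */
};

/*
 * Planes can only be coalesced into a single V4L2 plane if every plane
 * except the last one is completely used; otherwise there would be a gap
 * that the single-plane representation cannot describe.
 */
static bool canCoalesce(const std::vector<PlaneDesc> &planes)
{
	unsigned int bytesused = 0;
	unsigned int length = 0;

	for (size_t i = 0; i < planes.size(); ++i) {
		bytesused += planes[i].bytesused;
		length += planes[i].length;

		if (i != planes.size() - 1 && bytesused != length)
			return false;
	}

	return true;
}

When the invariant holds, the coalesced plane simply carries the summed bytesused and length, as the patch does for both the multi-planar and single-planar cases.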
Hi Umang, On Mon, Sep 06, 2021 at 06:24:21PM +0530, Umang Jain wrote: > On 9/6/21 5:54 PM, Laurent Pinchart wrote: > > On Mon, Sep 06, 2021 at 03:38:04PM +0530, Umang Jain wrote: > >> On 9/6/21 7:30 AM, Laurent Pinchart wrote: > >>> Hello everybody, > >>> > >>> This patch series started as an investigation of a qcam failure with > >>> IPU3 after the merge of the FrameBuffer offset support. While a hack in > >>> qcam would be possible, I decided to instead address the core issue and > >>> fix it in V4L2VideoDevice. > >>> > >>> Compared to v1, the series now includes fixes for cam and qcam in > >>> addition to the changes needed in the libcamera core. They have been > >>> tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > >>> > >>> The GStreamer element seems to work fine without any change required. > >>> The V4L2 compatibility layer is still broken, and I haven't tested the > >>> Android HAL yet (any volunteer ?). > >> I am applied this series on top of master and tested on nautilus. > >> Streaming seems fine to me, although it was appearing more choppy than > >> usual. > >> > >> Now that I see, entire nautilus seems laggy to respond in general. One > >> "Chrome" process hogging 50% CPU, seems like a transient issue. > >> > >> So, streaming seems fine on multiple runs, with multiple resolutions > >> selections (from the camera-app menu), however, requesting MJPEG > >> (shutter-click) results in a segfault. > > Thanks for testing ! > > > >> The stack trace is below: > >> > >> [0:24:11.415660787] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750235824 with 2 streams > >> [0:24:11.415858538] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous > >> [0:24:11.415905500] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) > >> [0:24:11.415943620] [10635] DEBUG HAL camera_device.cpp:966 '\_SB_.PCI0.I2C2.CAM0': 1 - (4160x3104)[0x00000021] -> (4160x3104)[NV12] (mapped) > >> [0:24:11.416461639] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 3 > >> [0:24:11.428903141] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video0[37:cap]: Dequeuing buffer 2 > >> [0:24:11.429095316] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature estimated: 6644 > >> [0:24:11.429563781] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 193 started > >> [0:24:11.429627478] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 193 > >> [0:24:11.429683048] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video6[25:cap]: Queueing buffer 1 > >> [0:24:11.436615191] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video5[27:out]: Queueing buffer 2 > >> [0:24:11.436718629] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video8[28:cap]: Queueing buffer 2 > >> [0:24:11.436799420] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video4[24:out]: Queueing buffer 2 > >> [0:24:11.457388821] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 1 > >> [0:24:11.457948159] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 0 > >> [0:24:11.458257692] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video5[27:out]: Dequeuing buffer 2 > >> [0:24:11.458525315] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 1 > >> [0:24:11.458968848] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > >> [0:24:11.459005634] [10643] DEBUG 
IPU3Awb awb.cpp:224 Grey world AWB > >> [0:24:11.459082715] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16864 and for blue: 1.14863 > >> [0:24:11.459325998] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 194 > >> [0:24:11.459562485] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 194 > >> [0:24:11.459815347] [10629] DEBUG Request request.cpp:291 Request(129:C:0/1:140736750229136) > >> [0:24:11.460390126] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750229136 completed with 1 streams > >> [0:24:11.461855854] [10635] DEBUG Request request.cpp:94 Created request - cookie: 140736750230064 > >> [0:24:11.461939858] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750230064 with 1 streams > >> [0:24:11.462057435] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous > >> [0:24:11.462098906] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) > >> [0:24:11.463164722] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 194 started > >> [0:24:11.463730946] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 193 > >> [0:24:11.464033686] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 194 > >> [0:24:11.464329869] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 0 > >> [0:24:11.495739092] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 195 started > >> [0:24:11.496489311] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Analogue Gain to 477 at index 194 > >> [0:24:11.496737385] [10629] DEBUG DelayedControls delayed_controls.cpp:285 Queue is empty, auto queue no-op. 
> >> [0:24:11.497044311] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video0[37:cap]: Dequeuing buffer 3 > >> [0:24:11.497335155] [10643] DEBUG IPU3Awb awb.cpp:331 Color temperature estimated: 6658 > >> [0:24:11.497528845] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video6[25:cap]: Queueing buffer 2 > >> [0:24:11.503589322] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video5[27:out]: Queueing buffer 3 > >> [0:24:11.504027344] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video8[28:cap]: Queueing buffer 3 > >> [0:24:11.504287330] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video4[24:out]: Queueing buffer 3 > >> [0:24:11.504712501] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 2 > >> [0:24:11.505005096] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 1 > >> [0:24:11.505260331] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video5[27:out]: Dequeuing buffer 3 > >> [0:24:11.505506837] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 2 > >> [0:24:11.505940926] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > >> [0:24:11.505976974] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > >> [0:24:11.506057427] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16797 and for blue: 1.14978 > >> [0:24:11.506297848] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 196 > >> [0:24:11.506547021] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 196 > >> [0:24:11.506796456] [10629] DEBUG Request request.cpp:291 Request(130:C:0/1:140736750225984) > >> [0:24:11.507374522] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750225984 completed with 1 streams > >> [0:24:11.509426987] [10635] DEBUG Request request.cpp:94 Created request - cookie: 140736750227664 > >> [0:24:11.509507039] [10635] DEBUG HAL camera_device.cpp:941 '\_SB_.PCI0.I2C2.CAM0': Queueing request 140736750227664 with 1 streams > >> [0:24:11.509630706] [10635] DEBUG Buffer framebuffer.cpp:249 Buffer is not contiguous > >> [0:24:11.509675200] [10635] DEBUG HAL camera_device.cpp:980 '\_SB_.PCI0.I2C2.CAM0': 0 - (4160x3104)[0x00000023] -> (4160x3104)[NV12] (direct) > >> [0:24:11.511309605] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1582 /dev/video0[37:cap]: Queueing buffer 1 > >> [0:24:11.528984601] [10629] DEBUG DelayedControls delayed_controls.cpp:237 frame 196 started > >> [0:24:11.529446048] [10629] DEBUG DelayedControls delayed_controls.cpp:272 Setting Exposure to 269 at index 196 > >> [0:24:11.548431632] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video4[24:out]: Dequeuing buffer 3 > >> [0:24:11.548993830] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video6[25:cap]: Dequeuing buffer 2 > >> [0:24:11.549320802] [10629] DEBUG V4L2 v4l2_videodevice.cpp:1650 /dev/video8[28:cap]: Dequeuing buffer 3 > >> [0:24:11.549807736] [10643] DEBUG IPU3Awb awb.cpp:270 Valid zones: 186 > >> [0:24:11.549846777] [10643] DEBUG IPU3Awb awb.cpp:224 Grey world AWB > >> [0:24:11.549927350] [10643] DEBUG IPU3Awb awb.cpp:273 Gain found for red: 1.16841 and for blue: 1.14912 > >> [0:24:11.550170924] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Exposure to 269 at index 197 > >> [0:24:11.550405629] [10629] DEBUG DelayedControls delayed_controls.cpp:179 Queuing Analogue Gain to 477 at index 197 > >> [0:24:11.550646460] [10629] DEBUG Request request.cpp:291 Request(131:C:0/1:140736750235824) > >> 
[0:24:11.551245104] [10629] DEBUG HAL camera_device.cpp:1117 '\_SB_.PCI0.I2C2.CAM0': Request 140736750235824 completed with 2 streams
> >> [0:24:11.562307680] [10629] DEBUG EXIF exif.cpp:522 Created EXIF instance (536 bytes)
> >> [0:24:11.634800113] [10629] DEBUG JPEG encoder_libjpeg.cpp:220 JPEG Encode Starting:4160x3104
> >>
> >> Thread 6 "CameraModuleThr" received signal SIGSEGV, Segmentation fault.
> >> [Switching to Thread 0x7fffe7fff640 (LWP 10629)]
> >> 0x00007ffff597ead5 in EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so
> >
> > Not nice :-S Could you please print frame.size() here ?
>
> [3:11:40.303057525] [25328] DEBUG EXIF exif.cpp:522 Created EXIF
> instance (536 bytes)
> [3:11:40.368109082] [25328] DEBUG JPEG encoder_libjpeg.cpp:222 JPEG
> Encode Starting:4160x3104
> [3:11:40.368158138] [25328] DEBUG JPEG encoder_libjpeg.cpp:128 Frame
> size : 12912640
> Segmentation fault (core dumped)

Looks like the encoder gets the first plane only. I'll have a look.

> >> (gdb) bt
> >> #0  0x00007ffff597ead5 in EncoderLibJpeg::compressNV(libcamera::Span<unsigned char const, 18446744073709551615ul>) () from /usr/lib64/camera_hal/libcamera-hal.so
> >> #1  0x00007ffff597eda0 in EncoderLibJpeg::encode(libcamera::Span<unsigned char const, 18446744073709551615ul>, libcamera::Span<unsigned char, 18446744073709551615ul>, libcamera::Span<unsigned char const, 18446744073709551615ul>, unsigned int) () from /usr/lib64/camera_hal/libcamera-hal.so
> >> #2  0x00007ffff597ec53 in EncoderLibJpeg::encode(libcamera::FrameBuffer const&, libcamera::Span<unsigned char, 18446744073709551615ul>, libcamera::Span<unsigned char const, 18446744073709551615ul>, unsigned int) () from /usr/lib64/camera_hal/libcamera-hal.so
> >> #3  0x00007ffff5981871 in PostProcessorJpeg::process(libcamera::FrameBuffer const&, CameraBuffer*, CameraMetadata const&, CameraMetadata*) () from /usr/lib64/camera_hal/libcamera-hal.so
> >> #4  0x00007ffff597dd8d in CameraStream::process(libcamera::FrameBuffer const&, native_handle const*, CameraMetadata const&, CameraMetadata*) () from /usr/lib64/camera_hal/libcamera-hal.so
> >> #5  0x00007ffff59717b9 in CameraDevice::requestComplete(libcamera::Request*) () from /usr/lib64/camera_hal/libcamera-hal.so
> >> #6  0x00007ffff5979a7c in libcamera::BoundMethodMember<CameraDevice, void, libcamera::Request*>::activate(libcamera::Request*, bool) () from /usr/lib64/camera_hal/libcamera-hal.so
> >> #7  0x00007ffff58f9051 in libcamera::Signal<libcamera::IPCMessage const&>::emit(libcamera::IPCMessage const&) () from /usr/lib64/libcamera.so
> >> #8  0x00007ffff58f8fe8 in libcamera::Camera::requestComplete(libcamera::Request*) () from /usr/lib64/libcamera.so
> >> #9  0x00007ffff591f28a in libcamera::PipelineHandler::completeRequest(libcamera::Request*) () from /usr/lib64/libcamera.so
> >> #10 0x00007ffff5935cef in libcamera::IPU3CameraData::queueFrameAction(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so
> >> #11 0x00007ffff58f0b02 in libcamera::BoundMethodMember<libcamera::ipa::ipu3::IPAProxyIPU3, void, unsigned int, libcamera::ipa::ipu3::IPU3Action const&>::activate(unsigned int, libcamera::ipa::ipu3::IPU3Action const&, bool) () from /usr/lib64/libcamera.so
> >> #12 0x00007ffff58edf81 in libcamera::Signal<unsigned int, libcamera::ipa::ipu3::IPU3Action const&>::emit(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so
> >> #13 0x00007ffff58eb5b4 in
libcamera::ipa::ipu3::IPAProxyIPU3::queueFrameActionThread(unsigned int, libcamera::ipa::ipu3::IPU3Action const&) () from /usr/lib64/libcamera.so > >> #14 0x00007ffff587caed in libcamera::Object::message(libcamera::Message*) () from /usr/lib64/libcamera-base.so > >> #15 0x00007ffff587e00b in libcamera::Thread::dispatchMessages(libcamera::Message::Type) () from /usr/lib64/libcamera-base.so > >> #16 0x00007ffff5876925 in libcamera::EventDispatcherPoll::processEvents() () from /usr/lib64/libcamera-base.so > >> #17 0x00007ffff587d6e2 in libcamera::Thread::exec() () from /usr/lib64/libcamera-base.so > >> #18 0x00007ffff58fb582 in libcamera::CameraManager::Private::run() () from /usr/lib64/libcamera.so > >> #19 0x00007ffff587e43c in ?? () from /usr/lib64/libcamera-base.so > >> #20 0x0000000000000000 in ?? () > >> > >>> The most important change is in patches 13/27 and 14/27, which translate > >>> between V4L2 buffers and libcamera FrameBuffer to handle the case where > >>> a multi-planar frame buffer is used with the V4L2 single-planar API. > >>> It's working more or less by chance at the moment (except in qcam where > >>> it's broken, and possibly in other places I haven't tested). Patches > >>> 01/27 to 12/27 are cleanups and additions to prepare for the work in > >>> V4L2VideoDevice, and patch 15/27 is a small cleanup on top. Patches > >>> 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. > >>> > >>> Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an > >>> issue in the Android HAL. Worth being noted is patch 19/27 that > >>> introduces an Image class shared by cam and qcam. The class duplicates > >>> the MappedFrameBuffer implementation private to libcamera. I've tried to > >>> rework MappedFrameBuffer into something I would be happy to see in the > >>> public API, but failed to do so in a reasonable amount of time, and I > >>> didn't want to delay this important regression fix. > >>> > >>> This series doesn't break any unit test, as vimc doesn't support NV12. > >>> Addition of NV12 support to the vimc kernel driver would be very nice, > >>> in order to test multi-planar support in our unit tests. 
Volunteers are > >>> welcome ;-) > >>> > >>> Laurent Pinchart (27): > >>> libcamera: base: utils: Use size_t for index in utils::enumerate() > >>> libcamera: file_descriptor: Add a function to retrieve the inode > >>> libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() > >>> libcamera: Use V4L2PixelFormat::fromPixelFormat() > >>> libcamera: formats: Move plane info structure to PixelFormatInfo > >>> libcamera: formats: Add planeSize() helpers to PixelFormatInfo > >>> libcamera: formats: Support V4L2 non-contiguous formats > >>> libcamera: framebuffer: Move planes check to constructor > >>> libcamera: framebuffer: Add a function to check if planes are > >>> contiguous > >>> libcamera: v4l2_videodevice: Cache PixelFormatInfo > >>> libcamera: v4l2_videodevice: Document plane handling in createBuffer() > >>> libcamera: v4l2_videodevice: Take stride into account to compute > >>> offsets > >>> libcamera: v4l2_videodevice: Coalesce planes when queuing buffer > >>> libcamera: v4l2_videodevice: Split planes when dequeuing buffer > >>> libcamera: v4l2_videodevice: Use utils::enumerate() > >>> libcamera: framebuffer: Allocate metadata planes at construction time > >>> libcamera: framebuffer: Prevent modifying the number of metadata > >>> planes > >>> android: camera_device: Don't assume all planes use the same fd > >>> cam: Add Image class > >>> cam: file_sink: Use Image class to access pixel data > >>> cam: drm: Support per-plane stride values > >>> cam: drm: Set per-plane offsets when creating DRM frame buffer > >>> cam: drm: Avoid importing the same dmabuf multiple times > >>> qcam: Print bytesused for all planes > >>> qcam: Use Image class to access pixel data > >>> qcam: viewfinder_gl: Support multi-planar buffers > >>> qcam: viewfinder_qt: Support multi-planar buffers > >>> > >>> include/libcamera/base/utils.h | 4 +- > >>> include/libcamera/file_descriptor.h | 3 + > >>> include/libcamera/framebuffer.h | 19 +- > >>> include/libcamera/internal/formats.h | 22 +- > >>> include/libcamera/internal/framebuffer.h | 2 + > >>> include/libcamera/internal/v4l2_pixelformat.h | 2 +- > >>> include/libcamera/internal/v4l2_videodevice.h | 3 +- > >>> src/android/camera_device.cpp | 25 +- > >>> src/android/mm/generic_camera_buffer.cpp | 11 +- > >>> src/android/yuv/post_processor_yuv.cpp | 10 +- > >>> src/cam/camera_session.cpp | 4 +- > >>> src/cam/drm.cpp | 38 +- > >>> src/cam/drm.h | 7 +- > >>> src/cam/file_sink.cpp | 44 +-- > >>> src/cam/file_sink.h | 6 +- > >>> src/cam/image.cpp | 107 +++++ > >>> src/cam/image.h | 52 +++ > >>> src/cam/kms_sink.cpp | 28 +- > >>> src/cam/meson.build | 1 + > >>> src/libcamera/file_descriptor.cpp | 26 ++ > >>> src/libcamera/formats.cpp | 373 ++++++++++++++---- > >>> src/libcamera/framebuffer.cpp | 57 ++- > >>> src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- > >>> src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- > >>> .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- > >>> src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- > >>> src/libcamera/pipeline/simple/converter.cpp | 8 +- > >>> src/libcamera/pipeline/simple/simple.cpp | 4 +- > >>> src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- > >>> src/libcamera/pipeline/vimc/vimc.cpp | 8 +- > >>> src/libcamera/v4l2_pixelformat.cpp | 11 +- > >>> src/libcamera/v4l2_videodevice.cpp | 196 ++++++--- > >>> src/qcam/format_converter.cpp | 18 +- > >>> src/qcam/format_converter.h | 9 +- > >>> src/qcam/main_window.cpp | 38 +- > >>> src/qcam/main_window.h | 4 +- > >>> src/qcam/meson.build | 1 + > >>> src/qcam/viewfinder.h | 6 +- > >>> 
src/qcam/viewfinder_gl.cpp | 45 +-- > >>> src/qcam/viewfinder_gl.h | 4 +- > >>> src/qcam/viewfinder_qt.cpp | 20 +- > >>> src/qcam/viewfinder_qt.h | 2 +- > >>> src/v4l2/v4l2_camera_proxy.cpp | 11 +- > >>> test/libtest/buffer_source.cpp | 3 +- > >>> test/utils.cpp | 10 +- > >>> 45 files changed, 911 insertions(+), 357 deletions(-) > >>> create mode 100644 src/cam/image.cpp > >>> create mode 100644 src/cam/image.h
Hi Laurent Thanks for the reply! On Mon, 6 Sept 2021 at 13:14, Laurent Pinchart <laurent.pinchart@ideasonboard.com> wrote: > > Hi David, > > On Mon, Sep 06, 2021 at 12:22:25PM +0100, David Plowman wrote: > > Hi Laurent, everyone > > > > Thanks for all the efforts to get this working! I had just a little > > question or two... > > > > 1. Is it easy to tell if a FrameBuffer is actually single plane or > > multi plane? If not, could we add a public API function that would > > tell us? > > You can use FrameBuffer::planes().size() to get the number of planes. Or > did you mean checking if the different planes in a multi-planar frame > buffer are stored contiguously in the same dmabuf ? There's a private > helper for that (FrameBuffer::Private::isContiguous()), I haven't made > it public yet as I wanted to evaluate the use cases first. Yes, isContiguous() sounds pretty much like what I want. I'm interested in functions that make it more convenient for me to know how I pass a buffer (for example) to my V4L2 h.264 encoder. > > > 2. Is it easy to get the full size of the buffer for the single plane > > case (rather than having to add all the bits up)? And again, if the > > answer is no, could we add such a thing? > > > > I'm thinking of trying to make life easy for applications that might > > want to pass these buffers to codecs where the driver might only > > support single planes. Not thinking of any platform in particular... > > :) > > It again depends what you mean :-) If the FrameBuffer has a single > plane, FrameBuffer::planes()[0].length (and > FrameMetaData::planes()[0].bytesused) will give you what you need. I > suspect you're however consider the case of a multi-planar FrameBuffer > with planes stored contiguously in memory, using the single-planar V4L2 > formats (e.g. V4L2_PIX_FMT_NV12, as opposed to V4L2_PIX_FMT_NV12M). I'm > a bit worried that a helper function in that case would be used by > applications to ignore that the buffer can be truly multi-planar. Perhaps we could define a "safe" version of the function that complains if it was called on a truly multi planar buffer? It might return zero, or print a warning to the console - would that help? Best regards David > > > On Mon, 6 Sept 2021 at 03:01, Laurent Pinchart wrote: > > > > > > Hello everybody, > > > > > > This patch series started as an investigation of a qcam failure with > > > IPU3 after the merge of the FrameBuffer offset support. While a hack in > > > qcam would be possible, I decided to instead address the core issue and > > > fix it in V4L2VideoDevice. > > > > > > Compared to v1, the series now includes fixes for cam and qcam in > > > addition to the changes needed in the libcamera core. They have been > > > tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > > > > > > The GStreamer element seems to work fine without any change required. > > > The V4L2 compatibility layer is still broken, and I haven't tested the > > > Android HAL yet (any volunteer ?). > > > > > > The most important change is in patches 13/27 and 14/27, which translate > > > between V4L2 buffers and libcamera FrameBuffer to handle the case where > > > a multi-planar frame buffer is used with the V4L2 single-planar API. > > > It's working more or less by chance at the moment (except in qcam where > > > it's broken, and possibly in other places I haven't tested). Patches > > > 01/27 to 12/27 are cleanups and additions to prepare for the work in > > > V4L2VideoDevice, and patch 15/27 is a small cleanup on top. 
Patches > > > 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. > > > > > > Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an > > > issue in the Android HAL. Worth being noted is patch 19/27 that > > > introduces an Image class shared by cam and qcam. The class duplicates > > > the MappedFrameBuffer implementation private to libcamera. I've tried to > > > rework MappedFrameBuffer into something I would be happy to see in the > > > public API, but failed to do so in a reasonable amount of time, and I > > > didn't want to delay this important regression fix. > > > > > > This series doesn't break any unit test, as vimc doesn't support NV12. > > > Addition of NV12 support to the vimc kernel driver would be very nice, > > > in order to test multi-planar support in our unit tests. Volunteers are > > > welcome ;-) > > > > > > Laurent Pinchart (27): > > > libcamera: base: utils: Use size_t for index in utils::enumerate() > > > libcamera: file_descriptor: Add a function to retrieve the inode > > > libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() > > > libcamera: Use V4L2PixelFormat::fromPixelFormat() > > > libcamera: formats: Move plane info structure to PixelFormatInfo > > > libcamera: formats: Add planeSize() helpers to PixelFormatInfo > > > libcamera: formats: Support V4L2 non-contiguous formats > > > libcamera: framebuffer: Move planes check to constructor > > > libcamera: framebuffer: Add a function to check if planes are > > > contiguous > > > libcamera: v4l2_videodevice: Cache PixelFormatInfo > > > libcamera: v4l2_videodevice: Document plane handling in createBuffer() > > > libcamera: v4l2_videodevice: Take stride into account to compute > > > offsets > > > libcamera: v4l2_videodevice: Coalesce planes when queuing buffer > > > libcamera: v4l2_videodevice: Split planes when dequeuing buffer > > > libcamera: v4l2_videodevice: Use utils::enumerate() > > > libcamera: framebuffer: Allocate metadata planes at construction time > > > libcamera: framebuffer: Prevent modifying the number of metadata > > > planes > > > android: camera_device: Don't assume all planes use the same fd > > > cam: Add Image class > > > cam: file_sink: Use Image class to access pixel data > > > cam: drm: Support per-plane stride values > > > cam: drm: Set per-plane offsets when creating DRM frame buffer > > > cam: drm: Avoid importing the same dmabuf multiple times > > > qcam: Print bytesused for all planes > > > qcam: Use Image class to access pixel data > > > qcam: viewfinder_gl: Support multi-planar buffers > > > qcam: viewfinder_qt: Support multi-planar buffers > > > > > > include/libcamera/base/utils.h | 4 +- > > > include/libcamera/file_descriptor.h | 3 + > > > include/libcamera/framebuffer.h | 19 +- > > > include/libcamera/internal/formats.h | 22 +- > > > include/libcamera/internal/framebuffer.h | 2 + > > > include/libcamera/internal/v4l2_pixelformat.h | 2 +- > > > include/libcamera/internal/v4l2_videodevice.h | 3 +- > > > src/android/camera_device.cpp | 25 +- > > > src/android/mm/generic_camera_buffer.cpp | 11 +- > > > src/android/yuv/post_processor_yuv.cpp | 10 +- > > > src/cam/camera_session.cpp | 4 +- > > > src/cam/drm.cpp | 38 +- > > > src/cam/drm.h | 7 +- > > > src/cam/file_sink.cpp | 44 +-- > > > src/cam/file_sink.h | 6 +- > > > src/cam/image.cpp | 107 +++++ > > > src/cam/image.h | 52 +++ > > > src/cam/kms_sink.cpp | 28 +- > > > src/cam/meson.build | 1 + > > > src/libcamera/file_descriptor.cpp | 26 ++ > > > src/libcamera/formats.cpp | 373 ++++++++++++++---- > > > 
src/libcamera/framebuffer.cpp | 57 ++- > > > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- > > > src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- > > > .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- > > > src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- > > > src/libcamera/pipeline/simple/converter.cpp | 8 +- > > > src/libcamera/pipeline/simple/simple.cpp | 4 +- > > > src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- > > > src/libcamera/pipeline/vimc/vimc.cpp | 8 +- > > > src/libcamera/v4l2_pixelformat.cpp | 11 +- > > > src/libcamera/v4l2_videodevice.cpp | 196 ++++++--- > > > src/qcam/format_converter.cpp | 18 +- > > > src/qcam/format_converter.h | 9 +- > > > src/qcam/main_window.cpp | 38 +- > > > src/qcam/main_window.h | 4 +- > > > src/qcam/meson.build | 1 + > > > src/qcam/viewfinder.h | 6 +- > > > src/qcam/viewfinder_gl.cpp | 45 +-- > > > src/qcam/viewfinder_gl.h | 4 +- > > > src/qcam/viewfinder_qt.cpp | 20 +- > > > src/qcam/viewfinder_qt.h | 2 +- > > > src/v4l2/v4l2_camera_proxy.cpp | 11 +- > > > test/libtest/buffer_source.cpp | 3 +- > > > test/utils.cpp | 10 +- > > > 45 files changed, 911 insertions(+), 357 deletions(-) > > > create mode 100644 src/cam/image.cpp > > > create mode 100644 src/cam/image.h > > -- > Regards, > > Laurent Pinchart
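For the record, until a public helper exists, an application can approximate what David is asking for along the following lines. This is only a sketch: frameSize() is a hypothetical name, and comparing raw fd numbers is only an approximation of dmabuf identity, since two different fds can refer to the same dmabuf, which is exactly what the FileDescriptor::inode() helper added in patch 02/27 is meant to address.

#include <cstddef>
#include <iostream>
#include <vector>

#include <libcamera/framebuffer.h>

/*
 * Return the total size of a frame stored contiguously in a single
 * dmabuf, or 0 if the planes are not laid out back to back.
 */
static unsigned int frameSize(const libcamera::FrameBuffer &buffer)
{
	const std::vector<libcamera::FrameBuffer::Plane> &planes = buffer.planes();
	unsigned int total = 0;

	for (size_t i = 0; i < planes.size(); ++i) {
		if (i > 0 &&
		    (planes[i].fd.fd() != planes[0].fd.fd() ||
		     planes[i].offset != planes[i - 1].offset + planes[i - 1].length)) {
			std::cerr << "frame buffer is not contiguous" << std::endl;
			return 0;
		}

		total += planes[i].length;
	}

	return total;
}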
Hi Laurent, thank you for the patch. On Mon, Sep 6, 2021 at 4:36 PM Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> wrote: > > Hi Laurent, > > On 06/09/2021 04:00, Laurent Pinchart wrote: > > The metadata planes are allocated by V4L2VideoDevice when dequeuing a > > buffer. This causes the metadata planes to only be allocated after a > > buffer gets dequeued, and doesn't provide any strong guarantee that > > their number matches the number of FrameBuffer planes. The lack of this > > invariant makes the FrameBuffer class fragile. > > > > As a first step towards fixing this, allocate the metadata planes when > > the FrameBuffer is constructed. The FrameMetadata API should be further > > improved by preventing a change in the number of planes. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > --- > > Changes since v1: > > > > - Return buffer with state set to FrameError on error > > --- > > src/libcamera/framebuffer.cpp | 2 ++ > > src/libcamera/v4l2_videodevice.cpp | 10 +++++----- > > 2 files changed, 7 insertions(+), 5 deletions(-) > > > > diff --git a/src/libcamera/framebuffer.cpp b/src/libcamera/framebuffer.cpp > > index e71c2ffae034..e4f8419a9063 100644 > > --- a/src/libcamera/framebuffer.cpp > > +++ b/src/libcamera/framebuffer.cpp > > @@ -210,6 +210,8 @@ FrameBuffer::FrameBuffer(const std::vector<Plane> &planes, unsigned int cookie) > > : Extensible(std::make_unique<Private>()), planes_(planes), > > cookie_(cookie) > > { > > + metadata_.planes.resize(planes_.size()); > > + > > unsigned int offset = 0; > > bool isContiguous = true; > > ino_t inode = 0; > > diff --git a/src/libcamera/v4l2_videodevice.cpp b/src/libcamera/v4l2_videodevice.cpp > > index 59aa53c7c27e..e729e608448c 100644 > > --- a/src/libcamera/v4l2_videodevice.cpp > > +++ b/src/libcamera/v4l2_videodevice.cpp > > @@ -1670,7 +1670,6 @@ FrameBuffer *V4L2VideoDevice::dequeueBuffer() > > > > unsigned int numV4l2Planes = multiPlanar ? buf.length : 1; > > FrameMetadata &metadata = buffer->metadata_; > > - metadata.planes.clear(); > > > > if (numV4l2Planes != buffer->planes().size()) { > > /* > > @@ -1700,8 +1699,9 @@ FrameBuffer *V4L2VideoDevice::dequeueBuffer() > > return buffer; > > } > > > > - metadata.planes.push_back({ std::min(plane.length, bytesused) }); > > - bytesused -= metadata.planes.back().bytesused; > > + metadata.planes[i].bytesused = > > + std::min(plane.length, bytesused); > > + bytesused -= metadata.planes[i].bytesused; > > } > > } else if (multiPlanar) { > > /* > > @@ -1710,9 +1710,9 @@ FrameBuffer *V4L2VideoDevice::dequeueBuffer() > > * V4L2 buffer is guaranteed to be equal at this point. > > */ > > for (unsigned int i = 0; i < numV4l2Planes; ++i) > > - metadata.planes.push_back({ planes[i].bytesused }); > > + metadata.planes[i].bytesused = planes[i].bytesused; > > } else { > > - metadata.planes.push_back({ buf.bytesused }); > > + metadata.planes[0].bytesused = buf.bytesused; > > } > > > > return buffer; > >
Hi Laurent, On Mon, Sep 6, 2021 at 4:38 PM Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> wrote: > > Hi Laurent, > > On 06/09/2021 04:00, Laurent Pinchart wrote: > > Now that libcamera correctly supports frame buffers with different > > dmabuf for each plane, remove the assumption that a single dmabuf is > > used. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > > --- > > src/android/camera_device.cpp | 25 ++++++------------------- > > 1 file changed, 6 insertions(+), 19 deletions(-) > > > > diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp > > index 8ca76719a50f..c64064106ccc 100644 > > --- a/src/android/camera_device.cpp > > +++ b/src/android/camera_device.cpp > > @@ -749,25 +749,6 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer > > libcamera::PixelFormat pixelFormat, > > const libcamera::Size &size) > > { > > - FileDescriptor fd; > > - /* > > - * This assumes all the planes are in the same dmabuf. > > - * > > - * \todo Verify that this assumption holds, fstat() can be used to check > > - * if two fds refer to the same dmabuf. > > - */ > > - for (int i = 0; i < camera3buffer->numFds; i++) { > > - if (camera3buffer->data[i] != -1) { > > - fd = FileDescriptor(camera3buffer->data[i]); > > - break; > > - } > > - } > > - > > - if (!fd.isValid()) { > > - LOG(HAL, Fatal) << "No valid fd"; > > - return nullptr; > > - } > > - > > CameraBuffer buf(camera3buffer, pixelFormat, size, PROT_READ); > > if (!buf.isValid()) { > > LOG(HAL, Fatal) << "Failed to create CameraBuffer"; > > @@ -776,6 +757,12 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer > > > > std::vector<FrameBuffer::Plane> planes(buf.numPlanes()); > > for (size_t i = 0; i < buf.numPlanes(); ++i) { I think it is not guaranteed that numFds is necessarily the same as buf.numPlanes(). -Hiro > > + FileDescriptor fd{ camera3buffer->data[i] }; > > + if (!fd.isValid()) { > > + LOG(HAL, Fatal) << "No valid fd"; > > + return nullptr; > > + } > > + > > planes[i].fd = fd; > > planes[i].offset = buf.offset(i); > > planes[i].length = buf.size(i); > >
Hi Laurent, On Mon, Sep 6, 2021 at 8:55 PM Laurent Pinchart <laurent.pinchart@ideasonboard.com> wrote: > > Hi Jean-Michel, > > On Mon, Sep 06, 2021 at 10:35:50AM +0200, Jean-Michel Hautbois wrote: > > On 06/09/2021 04:00, Laurent Pinchart wrote: > > > The new Image class represents a multi-planar image with direct access > > > to pixel data. It currently duplicates the function of the > > > MappedFrameBuffer class which is internal to libcamera, and will serve > > > as a design playground to improve the API until it is considered ready > > > to be made part of the libcamera public API. > > > > I like the idea, maybe add some documentation already in the class ? > > That's a good idea, but in the specific case I'd like to get this series > merged ASAP to fix the breakage in the master branch, so I'd prefer > adding the documentation on top. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > > --- > > > src/cam/image.cpp | 107 ++++++++++++++++++++++++++++++++++++++++++++ > > > src/cam/image.h | 52 +++++++++++++++++++++ > > > src/cam/meson.build | 1 + > > > 3 files changed, 160 insertions(+) > > > create mode 100644 src/cam/image.cpp > > > create mode 100644 src/cam/image.h > > > > > > diff --git a/src/cam/image.cpp b/src/cam/image.cpp > > > new file mode 100644 > > > index 000000000000..7ae5f52dccb4 > > > --- /dev/null > > > +++ b/src/cam/image.cpp > > > @@ -0,0 +1,107 @@ > > > +/* SPDX-License-Identifier: LGPL-2.1-or-later */ > > > +/* > > > + * Copyright (C) 2021, Google Inc. > > > + * > > > + * image.cpp - Multi-planar image with access to pixel data > > > + */ > > > + > > > +#include "image.h" > > > + > > > +#include <assert.h> > > > +#include <errno.h> > > > +#include <iostream> > > > +#include <map> > > > +#include <string.h> > > > +#include <sys/mman.h> > > > +#include <unistd.h> > > > + > > > +using namespace libcamera; > > > + > > > +std::unique_ptr<Image> Image::fromFrameBuffer(const FrameBuffer *buffer, MapMode mode) > > > > Can you see a use case for Image::toFrameBuffer not implemented yet ? > > What would be in this class apart from a conversion from a FrameBuffer > > to a Image ? > > I don't think so. The Image class is an interface to provide access to > pixel data. In its current form it's constructed from a FrameBuffer, but > I'd like the ability to construct it from a byte array as well. This is > related to the MappedVectorBuffer class that Hiro has proposed, it would > allow the JPEG compression in the Android HAL to use an Image as the > source, regardless of whether the compresses the still capture (coming > from libcamera in a FrameBuffer) or the thumbnail (downscaled in > software and stored in a std::vector<uint8_t>). > > What I still haven't determined is whether the Image class should be an > interface with pure virtual functions only, implemented by subclasses > such as FrameBufferImage or Memory Image, or if it should contain the > data as well, populated by the different constructors. > > I've also started to think about how to perform the mapping. For > FrameBuffer objects constructed from Android buffers, the mapping should > be delegated to gralloc on Android and to the CameraBufferManager on > Chrome OS. For FrameBuffer objects constructed internally by the > V4L2VideoDevice (and in particular the ones exposes to applications with > FrameBufferAllocator), the code below should be correct. 
For other types > of FrameBuffer objects supplied by applications, another method of > mapping may be needed. I'm not sure yet how to best handle that, and if > we'll need a FrameBufferMapper object that FrameBuffer instances will > reference. > > > > +{ > > > + std::unique_ptr<Image> image{ new Image() }; > > > + > > > + assert(!buffer->planes().empty()); > > > + > > > + int mmapFlags = 0; > > > + > > > + if (mode & MapMode::ReadOnly) > > > + mmapFlags |= PROT_READ; > > > + > > > + if (mode & MapMode::WriteOnly) > > > + mmapFlags |= PROT_WRITE; > > > + > > > + struct MappedBufferInfo { > > > + uint8_t *address = nullptr; > > > + size_t mapLength = 0; > > > + size_t dmabufLength = 0; > > > + }; > > > + std::map<int, MappedBufferInfo> mappedBuffers; > > > + > > > + for (const FrameBuffer::Plane &plane : buffer->planes()) { > > > + const int fd = plane.fd.fd(); > > > + if (mappedBuffers.find(fd) == mappedBuffers.end()) { > > > + const size_t length = lseek(fd, 0, SEEK_END); > > > + mappedBuffers[fd] = MappedBufferInfo{ nullptr, 0, length }; > > > + } > > > + > > > + const size_t length = mappedBuffers[fd].dmabufLength; > > > + > > > + if (plane.offset > length || > > > + plane.offset + plane.length > length) { > > > + std::cerr << "plane is out of buffer: buffer length=" > > > + << length << ", plane offset=" << plane.offset > > > + << ", plane length=" << plane.length > > > + << std::endl; > > > + return nullptr; > > > + } > > > + size_t &mapLength = mappedBuffers[fd].mapLength; > > > + mapLength = std::max(mapLength, > > > + static_cast<size_t>(plane.offset + plane.length)); > > > + } > > > + > > > + for (const FrameBuffer::Plane &plane : buffer->planes()) { > > > + const int fd = plane.fd.fd(); > > > + auto &info = mappedBuffers[fd]; > > > + if (!info.address) { > > > + void *address = mmap(nullptr, info.mapLength, mmapFlags, > > > + MAP_SHARED, fd, 0); > > > + if (address == MAP_FAILED) { > > > + int error = -errno; > > > + std::cerr << "Failed to mmap plane: " > > > + << strerror(-error) << std::endl; > > > + return nullptr; > > > + } > > > + > > > + info.address = static_cast<uint8_t *>(address); > > > + image->maps_.emplace_back(info.address, info.mapLength); > > > + } > > > + > > > + image->planes_.emplace_back(info.address + plane.offset, plane.length); > > > + } > > > + > > > > Why are you using two loops on buffer->planes() ? Is it for code clarity > > or something I did not get ? > > Because we may have multiple planes using the same dmabuf fd. The first > look gathers the dmabuf fds along with their length, the second loop > then maps them. We need to compute the length to be mapped by looking at > all planes first, before doing any mapping. > > > > + return image; > > > +} > > > + > > > +Image::Image() = default; > > > + > > > +Image::~Image() > > > +{ > > > + for (Span<uint8_t> &map : maps_) > > > + munmap(map.data(), map.size()); > > > +} > > > + > > > +unsigned int Image::numPlanes() const > > > +{ > > > + return planes_.size(); > > > +} > > > + > > > +Span<uint8_t> Image::data(unsigned int plane) > > > +{ > > > + return planes_[plane]; > > > +} > > > + > > > +Span<const uint8_t> Image::data(unsigned int plane) const > > > +{ > > > + return planes_[plane]; > > > +} > > > diff --git a/src/cam/image.h b/src/cam/image.h > > > new file mode 100644 > > > index 000000000000..1ce5f84e5f9e > > > --- /dev/null > > > +++ b/src/cam/image.h > > > @@ -0,0 +1,52 @@ > > > +/* SPDX-License-Identifier: LGPL-2.1-or-later */ > > > +/* > > > + * Copyright (C) 2021, Google Inc. 
> > > + * > > > + * image.h - Multi-planar image with access to pixel data > > > + */ > > > +#ifndef __CAM_IMAGE_H__ > > > +#define __CAM_IMAGE_H__ > > > + > > > +#include <memory> > > > +#include <stdint.h> > > > +#include <vector> > > > + > > > +#include <libcamera/base/class.h> > > > +#include <libcamera/base/flags.h> > > > +#include <libcamera/base/span.h> > > > + > > > +#include <libcamera/framebuffer.h> > > > + > > > +class Image > > > +{ > > > +public: > > > + enum class MapMode { > > > + ReadOnly = 1 << 0, > > > + WriteOnly = 1 << 1, > > > + ReadWrite = ReadOnly | WriteOnly, > > > + }; > > > + > > > + static std::unique_ptr<Image> fromFrameBuffer(const libcamera::FrameBuffer *buffer, > > > + MapMode mode); > > > + > > > + ~Image(); > > > + > > > + unsigned int numPlanes() const; > > > + > > > + libcamera::Span<uint8_t> data(unsigned int plane); > > > + libcamera::Span<const uint8_t> data(unsigned int plane) const; > > > + > > > +private: > > > + LIBCAMERA_DISABLE_COPY(Image) > > > + > > > + Image(); > > > + > > > + std::vector<libcamera::Span<uint8_t>> maps_; > > > + std::vector<libcamera::Span<uint8_t>> planes_; > > > +}; > > > + > > > +namespace libcamera { > > > +LIBCAMERA_FLAGS_ENABLE_OPERATORS(Image::MapMode) > > > +} > > > + > > > +#endif /* __CAM_IMAGE_H__ */ > > > diff --git a/src/cam/meson.build b/src/cam/meson.build > > > index ea36aaa5c514..e8e2ae57d3f4 100644 > > > --- a/src/cam/meson.build > > > +++ b/src/cam/meson.build > > > @@ -14,6 +14,7 @@ cam_sources = files([ > > > 'event_loop.cpp', > > > 'file_sink.cpp', > > > 'frame_sink.cpp', > > > + 'image.cpp', > > > 'main.cpp', > > > 'options.cpp', > > > 'stream_options.cpp', > > > > > -- > Regards, > > Laurent Pinchart
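A quick usage sketch may help show how the class is meant to be consumed; it roughly mirrors what the FileSink conversion in patch 20/27 ends up doing. dumpPlanes() is just an illustrative name, and the buffer is assumed to be a completed libcamera::FrameBuffer.

#include <memory>

#include <libcamera/base/span.h>
#include <libcamera/framebuffer.h>

#include "image.h"

static void dumpPlanes(const libcamera::FrameBuffer *buffer)
{
	std::unique_ptr<Image> image =
		Image::fromFrameBuffer(buffer, Image::MapMode::ReadOnly);
	if (!image)
		return;

	for (unsigned int i = 0; i < image->numPlanes(); ++i) {
		libcamera::Span<uint8_t> data = image->data(i);
		/* data.data() and data.size() give direct access to plane i. */
	}

	/* All mappings are released when the Image goes out of scope. */
}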
Hi Laurent, thank you for the patch. On Mon, Sep 6, 2021 at 6:27 PM Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> wrote: > > Hi Laurent, > > On 06/09/2021 04:00, Laurent Pinchart wrote: > > Replace the manual implementation of frame buffer mapping with the Image > > class to improve code sharing. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > It improves reading indeed ! > Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > --- > > src/cam/file_sink.cpp | 42 +++++++++++++----------------------------- > > src/cam/file_sink.h | 6 ++++-- > > 2 files changed, 17 insertions(+), 31 deletions(-) > > > > diff --git a/src/cam/file_sink.cpp b/src/cam/file_sink.cpp > > index 0fc7d621f50b..3c2e565b27a2 100644 > > --- a/src/cam/file_sink.cpp > > +++ b/src/cam/file_sink.cpp > > @@ -5,17 +5,18 @@ > > * file_sink.cpp - File Sink > > */ > > > > +#include <assert.h> > > #include <fcntl.h> > > #include <iomanip> > > #include <iostream> > > #include <sstream> > > #include <string.h> > > -#include <sys/mman.h> > > #include <unistd.h> > > > > #include <libcamera/camera.h> > > > > #include "file_sink.h" > > +#include "image.h" > > > > using namespace libcamera; > > > > @@ -26,12 +27,6 @@ FileSink::FileSink(const std::string &pattern) > > > > FileSink::~FileSink() > > { > > - for (auto &iter : mappedBuffers_) { > > - void *memory = iter.second.first; > > - unsigned int length = iter.second.second; > > - munmap(memory, length); > > - } > > - mappedBuffers_.clear(); > > } > > > > int FileSink::configure(const libcamera::CameraConfiguration &config) > > @@ -51,23 +46,11 @@ int FileSink::configure(const libcamera::CameraConfiguration &config) > > > > void FileSink::mapBuffer(FrameBuffer *buffer) > > { > > - /* \todo use MappedFrameBuffer. */ > > - for (const FrameBuffer::Plane &plane : buffer->planes()) { > > - const int fd = plane.fd.fd(); > > - if (mappedBuffers_.find(fd) == mappedBuffers_.end()) { > > - /** > > - * \todo Should we try to only map the portions of the > > - * dmabuf that are used by planes ? 
> > - */ > > - size_t length = lseek(fd, 0, SEEK_END); > > - void *memory = mmap(NULL, plane.length, PROT_READ, > > - MAP_SHARED, fd, 0); > > - mappedBuffers_[fd] = std::make_pair(memory, length); > > - } > > + std::unique_ptr<Image> image = > > + Image::fromFrameBuffer(buffer, Image::MapMode::ReadOnly); > > + assert(image != nullptr); > > > > - void *memory = mappedBuffers_[fd].first; > > - planeData_[&plane] = static_cast<uint8_t *>(memory) + plane.offset; > > - } > > + mappedBuffers_[buffer] = std::move(image); > > } > > > > bool FileSink::processRequest(Request *request) > > @@ -108,19 +91,20 @@ void FileSink::writeBuffer(const Stream *stream, FrameBuffer *buffer) > > return; > > } > > > > + Image *image = mappedBuffers_[buffer].get(); > > + > > for (unsigned int i = 0; i < buffer->planes().size(); ++i) { > > - const FrameBuffer::Plane &plane = buffer->planes()[i]; > > const FrameMetadata::Plane &meta = buffer->metadata().planes()[i]; > > > > - uint8_t *data = planeData_[&plane]; > > - unsigned int length = std::min(meta.bytesused, plane.length); > > + Span<uint8_t> data = image->data(i); > > + unsigned int length = std::min<unsigned int>(meta.bytesused, data.size()); > > > > - if (meta.bytesused > plane.length) > > + if (meta.bytesused > data.size()) > > std::cerr << "payload size " << meta.bytesused > > - << " larger than plane size " << plane.length > > + << " larger than plane size " << data.size() > > << std::endl; > > > > - ret = ::write(fd, data, length); > > + ret = ::write(fd, data.data(), length); > > if (ret < 0) { > > ret = -errno; > > std::cerr << "write error: " << strerror(-ret) > > diff --git a/src/cam/file_sink.h b/src/cam/file_sink.h > > index c12325d955c5..335be93b8732 100644 > > --- a/src/cam/file_sink.h > > +++ b/src/cam/file_sink.h > > @@ -8,12 +8,15 @@ > > #define __CAM_FILE_SINK_H__ > > > > #include <map> > > +#include <memory> > > #include <string> > > > > #include <libcamera/stream.h> > > > > #include "frame_sink.h" > > > > +class Image; > > + > > class FileSink : public FrameSink > > { > > public: > > @@ -32,8 +35,7 @@ private: > > > > std::map<const libcamera::Stream *, std::string> streamNames_; > > std::string pattern_; > > - std::map<int, std::pair<void *, unsigned int>> mappedBuffers_; > > - std::map<const libcamera::FrameBuffer::Plane *, uint8_t *> planeData_; > > + std::map<libcamera::FrameBuffer *, std::unique_ptr<Image>> mappedBuffers_; > > }; > > > > #endif /* __CAM_FILE_SINK_H__ */ > >
Hi Hiro, On Mon, Sep 06, 2021 at 10:35:46PM +0900, Hirokazu Honda wrote: > On Mon, Sep 6, 2021 at 4:38 PM Jean-Michel Hautbois wrote: > > On 06/09/2021 04:00, Laurent Pinchart wrote: > > > Now that libcamera correctly supports frame buffers with different > > > dmabuf for each plane, remove the assumption that a single dmabuf is > > > used. > > > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > > Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > > > --- > > > src/android/camera_device.cpp | 25 ++++++------------------- > > > 1 file changed, 6 insertions(+), 19 deletions(-) > > > > > > diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp > > > index 8ca76719a50f..c64064106ccc 100644 > > > --- a/src/android/camera_device.cpp > > > +++ b/src/android/camera_device.cpp > > > @@ -749,25 +749,6 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer > > > libcamera::PixelFormat pixelFormat, > > > const libcamera::Size &size) > > > { > > > - FileDescriptor fd; > > > - /* > > > - * This assumes all the planes are in the same dmabuf. > > > - * > > > - * \todo Verify that this assumption holds, fstat() can be used to check > > > - * if two fds refer to the same dmabuf. > > > - */ > > > - for (int i = 0; i < camera3buffer->numFds; i++) { > > > - if (camera3buffer->data[i] != -1) { > > > - fd = FileDescriptor(camera3buffer->data[i]); > > > - break; > > > - } > > > - } > > > - > > > - if (!fd.isValid()) { > > > - LOG(HAL, Fatal) << "No valid fd"; > > > - return nullptr; > > > - } > > > - > > > CameraBuffer buf(camera3buffer, pixelFormat, size, PROT_READ); > > > if (!buf.isValid()) { > > > LOG(HAL, Fatal) << "Failed to create CameraBuffer"; > > > @@ -776,6 +757,12 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer > > > > > > std::vector<FrameBuffer::Plane> planes(buf.numPlanes()); > > > for (size_t i = 0; i < buf.numPlanes(); ++i) { > > I think it is not guaranteed that numFds is necessarily the same as > buf.numPlanes(). Oh ? What's the expected behaviour then ? Is there any documentation about all this ? > > > + FileDescriptor fd{ camera3buffer->data[i] }; > > > + if (!fd.isValid()) { > > > + LOG(HAL, Fatal) << "No valid fd"; > > > + return nullptr; > > > + } > > > + > > > planes[i].fd = fd; > > > planes[i].offset = buf.offset(i); > > > planes[i].length = buf.size(i); > > >
Hi Hiro, On Mon, Sep 06, 2021 at 10:06:55PM +0900, Hirokazu Honda wrote: > On Mon, Sep 6, 2021 at 11:01 AM Laurent Pinchart wrote: > > > > When queueing a buffer to a V4L2VideoDevice, the number of planes in the > > FrameBuffer may not match the number of V4L2 buffer planes if the > > PixelFormat is multi-planar (has multiple colour planes) and the V4L2 > > format is single-planar (has a single buffer plane). In this case, we > > need to coalesce all FrameBuffer planes into a single V4L2 buffer plane. > > Do so, and add validity checks to reject frame buffers that can't be > > described using a single V4L2 buffer plane. > > > > This change prepares for proper multi-planar support, but isn't expected > > to result in a change of behaviour with existing pipeline handlers, as > > none of them queue an output buffer with multiple FrameBuffer planes or > > use non-contiguous buffers for either capture or output. > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com> > > Reviewed-by: Hirokazu Honda <hiroh@chromium.org> > > --- > > Changes since v1: > > > > - Make numV4l2Planes const > > - Use format_.planesCount > > --- > > src/libcamera/v4l2_videodevice.cpp | 67 +++++++++++++++++++++++++----- > > 1 file changed, 57 insertions(+), 10 deletions(-) > > > > diff --git a/src/libcamera/v4l2_videodevice.cpp b/src/libcamera/v4l2_videodevice.cpp > > index c6c9263c49e9..625d5da40337 100644 > > --- a/src/libcamera/v4l2_videodevice.cpp > > +++ b/src/libcamera/v4l2_videodevice.cpp > > @@ -22,10 +22,12 @@ > > > > #include <libcamera/base/event_notifier.h> > > #include <libcamera/base/log.h> > > +#include <libcamera/base/utils.h> > > > > #include <libcamera/file_descriptor.h> > > > > #include "libcamera/internal/formats.h" > > +#include "libcamera/internal/framebuffer.h" > > #include "libcamera/internal/media_device.h" > > #include "libcamera/internal/media_object.h" > > > > @@ -1496,10 +1498,20 @@ int V4L2VideoDevice::queueBuffer(FrameBuffer *buffer) > > > > bool multiPlanar = V4L2_TYPE_IS_MULTIPLANAR(buf.type); > > const std::vector<FrameBuffer::Plane> &planes = buffer->planes(); > > + const unsigned int numV4l2Planes = format_.planesCount; > > + > > + /* > > + * If the frame buffer has multiple planes and the V4L2 format requires > > + * contiguous planes, ensure that's the case. > > + */ > > + if (planes.size() != numV4l2Planes && !buffer->_d()->isContiguous()) { > > + LOG(V4L2, Error) << "Device format requires contiguous buffer"; > > + return -EINVAL; > > + } > > nit: shall we also check planes.size() >= numV4l2Planes? It's a good idea, I'll do so. 
> > > > if (buf.memory == V4L2_MEMORY_DMABUF) { > > if (multiPlanar) { > > - for (unsigned int p = 0; p < planes.size(); ++p) > > + for (unsigned int p = 0; p < numV4l2Planes; ++p) > > v4l2Planes[p].m.fd = planes[p].fd.fd(); > > } else { > > buf.m.fd = planes[0].fd.fd(); > > @@ -1507,23 +1519,58 @@ int V4L2VideoDevice::queueBuffer(FrameBuffer *buffer) > > } > > > > if (multiPlanar) { > > - buf.length = planes.size(); > > + buf.length = numV4l2Planes; > > buf.m.planes = v4l2Planes; > > } > > > > if (V4L2_TYPE_IS_OUTPUT(buf.type)) { > > const FrameMetadata &metadata = buffer->metadata(); > > > > - if (multiPlanar) { > > - unsigned int nplane = 0; > > - for (const FrameMetadata::Plane &plane : metadata.planes) { > > - v4l2Planes[nplane].bytesused = plane.bytesused; > > - v4l2Planes[nplane].length = buffer->planes()[nplane].length; > > - nplane++; > > + if (numV4l2Planes != planes.size()) { > > + /* > > + * If we have a multi-planar buffer with a V4L2 > > + * single-planar format, coalesce all planes. The length > > + * and number of bytes used may only differ in the last > > + * plane as any other situation can't be represented. > > + */ > > + unsigned int bytesused = 0; > > + unsigned int length = 0; > > + > > + for (auto [i, plane] : utils::enumerate(planes)) { > > + bytesused += metadata.planes[i].bytesused; > > + length += plane.length; > > + > > + if (i != planes.size() - 1 && bytesused != length) { > > + LOG(V4L2, Error) > > + << "Holes in multi-planar buffer not supported"; > > + return -EINVAL; > > + } > > + } > > + > > + if (multiPlanar) { > > + v4l2Planes[0].bytesused = bytesused; > > + v4l2Planes[0].length = length; > > + } else { > > + buf.bytesused = bytesused; > > + buf.length = length; > > + } > > + } else if (multiPlanar) { > > + /* > > + * If we use the multi-planar API, fill in the planes. > > + * The number of planes in the frame buffer and in the > > + * V4L2 buffer is guaranteed to be equal at this point. > > + */ > > + for (auto [i, plane] : utils::enumerate(planes)) { > > + v4l2Planes[i].bytesused = metadata.planes[i].bytesused; > > + v4l2Planes[i].length = plane.length; > > } > > } else { > > - if (metadata.planes.size()) > > - buf.bytesused = metadata.planes[0].bytesused; > > + /* > > + * Single-planar API with a single plane in the buffer > > + * is trivial to handle. > > + */ > > + buf.bytesused = metadata.planes[0].bytesused; > > + buf.length = planes[0].length; > > } > > > > buf.sequence = metadata.sequence;
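The extra check Hiro suggested above was accepted, and a rough sketch of how it might read in the context of queueBuffer() is shown below, reusing the names from the patch (planes, format_, buffer->_d()->isContiguous()). The exact placement and error messages are guesses, not the final patch.

	const unsigned int numV4l2Planes = format_.planesCount;

	/*
	 * A frame buffer can only be coalesced down to fewer V4L2 planes,
	 * never expanded, so reject buffers that have too few planes.
	 */
	if (planes.size() < numV4l2Planes) {
		LOG(V4L2, Error) << "Frame buffer has too few planes";
		return -EINVAL;
	}

	/*
	 * If the plane counts differ, coalescing requires the frame buffer
	 * planes to be contiguous in memory.
	 */
	if (planes.size() != numV4l2Planes && !buffer->_d()->isContiguous()) {
		LOG(V4L2, Error) << "Device format requires contiguous buffer";
		return -EINVAL;
	}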
Hi David, On Mon, Sep 06, 2021 at 02:13:58PM +0100, David Plowman wrote: > On Mon, 6 Sept 2021 at 13:14, Laurent Pinchart wrote: > > On Mon, Sep 06, 2021 at 12:22:25PM +0100, David Plowman wrote: > > > Hi Laurent, everyone > > > > > > Thanks for all the efforts to get this working! I had just a little > > > question or two... > > > > > > 1. Is it easy to tell if a FrameBuffer is actually single plane or > > > multi plane? If not, could we add a public API function that would > > > tell us? > > > > You can use FrameBuffer::planes().size() to get the number of planes. Or > > did you mean checking if the different planes in a multi-planar frame > > buffer are stored contiguously in the same dmabuf ? There's a private > > helper for that (FrameBuffer::Private::isContiguous()), I haven't made > > it public yet as I wanted to evaluate the use cases first. > > Yes, isContiguous() sounds pretty much like what I want. I'm > interested in functions that make it more convenient for me to know > how I pass a buffer (for example) to my V4L2 h.264 encoder. I wish the V4L2 multi-planar support had been better designed. It's painful :-S > > > 2. Is it easy to get the full size of the buffer for the single plane > > > case (rather than having to add all the bits up)? And again, if the > > > answer is no, could we add such a thing? > > > > > > I'm thinking of trying to make life easy for applications that might > > > want to pass these buffers to codecs where the driver might only > > > support single planes. Not thinking of any platform in particular... > > > :) > > > > It again depends what you mean :-) If the FrameBuffer has a single > > plane, FrameBuffer::planes()[0].length (and > > FrameMetaData::planes()[0].bytesused) will give you what you need. I > > suspect you're however consider the case of a multi-planar FrameBuffer > > with planes stored contiguously in memory, using the single-planar V4L2 > > formats (e.g. V4L2_PIX_FMT_NV12, as opposed to V4L2_PIX_FMT_NV12M). I'm > > a bit worried that a helper function in that case would be used by > > applications to ignore that the buffer can be truly multi-planar. > > Perhaps we could define a "safe" version of the function that > complains if it was called on a truly multi planar buffer? It might > return zero, or print a warning to the console - would that help? Is [0] what you're essentially looking for ? How would you like to handle bytesused vs. length ? On a side note, I'm increasingly starting to dislike bytesused, as it should only differ from length for compressed formats. Having redundant information in the most common case isn't nice. [0] https://git.libcamera.org/libcamera/pinchartl/libcamera.git/tree/src/libcamera/v4l2_videodevice.cpp?h=fb/offset&id=f46dffea24149ceaf14e12432db97aed116ac0d4#n1530 > > > On Mon, 6 Sept 2021 at 03:01, Laurent Pinchart wrote: > > > > > > > > Hello everybody, > > > > > > > > This patch series started as an investigation of a qcam failure with > > > > IPU3 after the merge of the FrameBuffer offset support. While a hack in > > > > qcam would be possible, I decided to instead address the core issue and > > > > fix it in V4L2VideoDevice. > > > > > > > > Compared to v1, the series now includes fixes for cam and qcam in > > > > addition to the changes needed in the libcamera core. They have been > > > > tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > > > > > > > > The GStreamer element seems to work fine without any change required. 
> > > > The V4L2 compatibility layer is still broken, and I haven't tested the > > > > Android HAL yet (any volunteer ?). > > > > > > > > The most important change is in patches 13/27 and 14/27, which translate > > > > between V4L2 buffers and libcamera FrameBuffer to handle the case where > > > > a multi-planar frame buffer is used with the V4L2 single-planar API. > > > > It's working more or less by chance at the moment (except in qcam where > > > > it's broken, and possibly in other places I haven't tested). Patches > > > > 01/27 to 12/27 are cleanups and additions to prepare for the work in > > > > V4L2VideoDevice, and patch 15/27 is a small cleanup on top. Patches > > > > 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. > > > > > > > > Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an > > > > issue in the Android HAL. Worth being noted is patch 19/27 that > > > > introduces an Image class shared by cam and qcam. The class duplicates > > > > the MappedFrameBuffer implementation private to libcamera. I've tried to > > > > rework MappedFrameBuffer into something I would be happy to see in the > > > > public API, but failed to do so in a reasonable amount of time, and I > > > > didn't want to delay this important regression fix. > > > > > > > > This series doesn't break any unit test, as vimc doesn't support NV12. > > > > Addition of NV12 support to the vimc kernel driver would be very nice, > > > > in order to test multi-planar support in our unit tests. Volunteers are > > > > welcome ;-) > > > > > > > > Laurent Pinchart (27): > > > > libcamera: base: utils: Use size_t for index in utils::enumerate() > > > > libcamera: file_descriptor: Add a function to retrieve the inode > > > > libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() > > > > libcamera: Use V4L2PixelFormat::fromPixelFormat() > > > > libcamera: formats: Move plane info structure to PixelFormatInfo > > > > libcamera: formats: Add planeSize() helpers to PixelFormatInfo > > > > libcamera: formats: Support V4L2 non-contiguous formats > > > > libcamera: framebuffer: Move planes check to constructor > > > > libcamera: framebuffer: Add a function to check if planes are > > > > contiguous > > > > libcamera: v4l2_videodevice: Cache PixelFormatInfo > > > > libcamera: v4l2_videodevice: Document plane handling in createBuffer() > > > > libcamera: v4l2_videodevice: Take stride into account to compute > > > > offsets > > > > libcamera: v4l2_videodevice: Coalesce planes when queuing buffer > > > > libcamera: v4l2_videodevice: Split planes when dequeuing buffer > > > > libcamera: v4l2_videodevice: Use utils::enumerate() > > > > libcamera: framebuffer: Allocate metadata planes at construction time > > > > libcamera: framebuffer: Prevent modifying the number of metadata > > > > planes > > > > android: camera_device: Don't assume all planes use the same fd > > > > cam: Add Image class > > > > cam: file_sink: Use Image class to access pixel data > > > > cam: drm: Support per-plane stride values > > > > cam: drm: Set per-plane offsets when creating DRM frame buffer > > > > cam: drm: Avoid importing the same dmabuf multiple times > > > > qcam: Print bytesused for all planes > > > > qcam: Use Image class to access pixel data > > > > qcam: viewfinder_gl: Support multi-planar buffers > > > > qcam: viewfinder_qt: Support multi-planar buffers > > > > > > > > include/libcamera/base/utils.h | 4 +- > > > > include/libcamera/file_descriptor.h | 3 + > > > > include/libcamera/framebuffer.h | 19 +- > > > > 
include/libcamera/internal/formats.h | 22 +- > > > > include/libcamera/internal/framebuffer.h | 2 + > > > > include/libcamera/internal/v4l2_pixelformat.h | 2 +- > > > > include/libcamera/internal/v4l2_videodevice.h | 3 +- > > > > src/android/camera_device.cpp | 25 +- > > > > src/android/mm/generic_camera_buffer.cpp | 11 +- > > > > src/android/yuv/post_processor_yuv.cpp | 10 +- > > > > src/cam/camera_session.cpp | 4 +- > > > > src/cam/drm.cpp | 38 +- > > > > src/cam/drm.h | 7 +- > > > > src/cam/file_sink.cpp | 44 +-- > > > > src/cam/file_sink.h | 6 +- > > > > src/cam/image.cpp | 107 +++++ > > > > src/cam/image.h | 52 +++ > > > > src/cam/kms_sink.cpp | 28 +- > > > > src/cam/meson.build | 1 + > > > > src/libcamera/file_descriptor.cpp | 26 ++ > > > > src/libcamera/formats.cpp | 373 ++++++++++++++---- > > > > src/libcamera/framebuffer.cpp | 57 ++- > > > > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- > > > > src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- > > > > .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- > > > > src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- > > > > src/libcamera/pipeline/simple/converter.cpp | 8 +- > > > > src/libcamera/pipeline/simple/simple.cpp | 4 +- > > > > src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- > > > > src/libcamera/pipeline/vimc/vimc.cpp | 8 +- > > > > src/libcamera/v4l2_pixelformat.cpp | 11 +- > > > > src/libcamera/v4l2_videodevice.cpp | 196 ++++++--- > > > > src/qcam/format_converter.cpp | 18 +- > > > > src/qcam/format_converter.h | 9 +- > > > > src/qcam/main_window.cpp | 38 +- > > > > src/qcam/main_window.h | 4 +- > > > > src/qcam/meson.build | 1 + > > > > src/qcam/viewfinder.h | 6 +- > > > > src/qcam/viewfinder_gl.cpp | 45 +-- > > > > src/qcam/viewfinder_gl.h | 4 +- > > > > src/qcam/viewfinder_qt.cpp | 20 +- > > > > src/qcam/viewfinder_qt.h | 2 +- > > > > src/v4l2/v4l2_camera_proxy.cpp | 11 +- > > > > test/libtest/buffer_source.cpp | 3 +- > > > > test/utils.cpp | 10 +- > > > > 45 files changed, 911 insertions(+), 357 deletions(-) > > > > create mode 100644 src/cam/image.cpp > > > > create mode 100644 src/cam/image.h
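Coming back to David's second question above (getting the full size of a contiguous buffer without adding the planes up by hand), the "safe" helper he floats could be sketched as follows. contiguousFrameSize() is a hypothetical name, not part of the libcamera API, and since FrameBuffer::Private::isContiguous() is not public the contiguity information is passed in by the caller here.

#include <iostream>

#include <libcamera/framebuffer.h>

/*
 * Hypothetical helper, for illustration only: return the total length
 * of a frame buffer whose planes live contiguously in one dmabuf, or 0
 * with a warning when the buffer is truly multi-planar.
 */
static unsigned int contiguousFrameSize(const libcamera::FrameBuffer &buffer,
					bool contiguous)
{
	if (!contiguous) {
		std::cerr << "Buffer planes are not contiguous" << std::endl;
		return 0;
	}

	unsigned int size = 0;
	for (const libcamera::FrameBuffer::Plane &plane : buffer.planes())
		size += plane.length;

	return size;
}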
Hi Laurent, On Mon, Sep 6, 2021 at 10:53 PM Laurent Pinchart <laurent.pinchart@ideasonboard.com> wrote: > > Hi Hiro, > > On Mon, Sep 06, 2021 at 10:35:46PM +0900, Hirokazu Honda wrote: > > On Mon, Sep 6, 2021 at 4:38 PM Jean-Michel Hautbois wrote: > > > On 06/09/2021 04:00, Laurent Pinchart wrote: > > > > Now that libcamera correctly supports frame buffers with different > > > > dmabuf for each plane, remove the assumption that a single dmabuf is > > > > used. > > > > > > > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> > > > > Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com> > > > > --- > > > > src/android/camera_device.cpp | 25 ++++++------------------- > > > > 1 file changed, 6 insertions(+), 19 deletions(-) > > > > > > > > diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp > > > > index 8ca76719a50f..c64064106ccc 100644 > > > > --- a/src/android/camera_device.cpp > > > > +++ b/src/android/camera_device.cpp > > > > @@ -749,25 +749,6 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer > > > > libcamera::PixelFormat pixelFormat, > > > > const libcamera::Size &size) > > > > { > > > > - FileDescriptor fd; > > > > - /* > > > > - * This assumes all the planes are in the same dmabuf. > > > > - * > > > > - * \todo Verify that this assumption holds, fstat() can be used to check > > > > - * if two fds refer to the same dmabuf. > > > > - */ > > > > - for (int i = 0; i < camera3buffer->numFds; i++) { > > > > - if (camera3buffer->data[i] != -1) { > > > > - fd = FileDescriptor(camera3buffer->data[i]); > > > > - break; > > > > - } > > > > - } > > > > - > > > > - if (!fd.isValid()) { > > > > - LOG(HAL, Fatal) << "No valid fd"; > > > > - return nullptr; > > > > - } > > > > - > > > > CameraBuffer buf(camera3buffer, pixelFormat, size, PROT_READ); > > > > if (!buf.isValid()) { > > > > LOG(HAL, Fatal) << "Failed to create CameraBuffer"; > > > > @@ -776,6 +757,12 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer > > > > > > > > std::vector<FrameBuffer::Plane> planes(buf.numPlanes()); > > > > for (size_t i = 0; i < buf.numPlanes(); ++i) { > > > > I think it is not guaranteed that numFds is necessarily the same as > > buf.numPlanes(). > > Oh ? What's the expected behaviour then ? Is there any documentation > about all this ? I could not find any documentation about it. They may have to be the same, or numFds may at least have to be larger than the number of planes. On Chrome OS, numFds always appears to be four, and the same number of fds as planes is filled in. So I think this code is fine. -Hiro > > > > > + FileDescriptor fd{ camera3buffer->data[i] }; > > > > + if (!fd.isValid()) { > > > > + LOG(HAL, Fatal) << "No valid fd"; > > > > + return nullptr; > > > > + } > > > > + > > > > planes[i].fd = fd; > > > > planes[i].offset = buf.offset(i); > > > > planes[i].length = buf.size(i); > > > > > > -- > Regards, > > Laurent Pinchart
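Given Hiro's observation that nothing formally guarantees numFds to match the number of planes, a more defensive variant of the loop from the patch could also bound-check the index against numFds before building each FileDescriptor. This is purely illustrative, not what was merged, and whether rejecting such handles is the right policy (rather than, say, reusing a shared fd) is exactly the open question above.

	std::vector<FrameBuffer::Plane> planes(buf.numPlanes());
	for (size_t i = 0; i < buf.numPlanes(); ++i) {
		/*
		 * Don't index past the fds provided by the buffer handle in
		 * case the gralloc implementation exposes fewer fds than the
		 * format has planes.
		 */
		if (static_cast<int>(i) >= camera3buffer->numFds ||
		    camera3buffer->data[i] == -1) {
			LOG(HAL, Fatal) << "No valid fd for plane " << i;
			return nullptr;
		}

		FileDescriptor fd{ camera3buffer->data[i] };
		if (!fd.isValid()) {
			LOG(HAL, Fatal) << "No valid fd";
			return nullptr;
		}

		planes[i].fd = fd;
		planes[i].offset = buf.offset(i);
		planes[i].length = buf.size(i);
	}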
Hi Laurent On Mon, 6 Sept 2021 at 15:16, Laurent Pinchart <laurent.pinchart@ideasonboard.com> wrote: > > Hi David, > > On Mon, Sep 06, 2021 at 02:13:58PM +0100, David Plowman wrote: > > On Mon, 6 Sept 2021 at 13:14, Laurent Pinchart wrote: > > > On Mon, Sep 06, 2021 at 12:22:25PM +0100, David Plowman wrote: > > > > Hi Laurent, everyone > > > > > > > > Thanks for all the efforts to get this working! I had just a little > > > > question or two... > > > > > > > > 1. Is it easy to tell if a FrameBuffer is actually single plane or > > > > multi plane? If not, could we add a public API function that would > > > > tell us? > > > > > > You can use FrameBuffer::planes().size() to get the number of planes. Or > > > did you mean checking if the different planes in a multi-planar frame > > > buffer are stored contiguously in the same dmabuf ? There's a private > > > helper for that (FrameBuffer::Private::isContiguous()), I haven't made > > > it public yet as I wanted to evaluate the use cases first. > > > > Yes, isContiguous() sounds pretty much like what I want. I'm > > interested in functions that make it more convenient for me to know > > how I pass a buffer (for example) to my V4L2 h.264 encoder. > > I wish the V4L2 multi-planar support had been better designed. It's > painful :-S > > > > > 2. Is it easy to get the full size of the buffer for the single plane > > > > case (rather than having to add all the bits up)? And again, if the > > > > answer is no, could we add such a thing? > > > > > > > > I'm thinking of trying to make life easy for applications that might > > > > want to pass these buffers to codecs where the driver might only > > > > support single planes. Not thinking of any platform in particular... > > > > :) > > > > > > It again depends what you mean :-) If the FrameBuffer has a single > > > plane, FrameBuffer::planes()[0].length (and > > > FrameMetaData::planes()[0].bytesused) will give you what you need. I > > > suspect you're however consider the case of a multi-planar FrameBuffer > > > with planes stored contiguously in memory, using the single-planar V4L2 > > > formats (e.g. V4L2_PIX_FMT_NV12, as opposed to V4L2_PIX_FMT_NV12M). I'm > > > a bit worried that a helper function in that case would be used by > > > applications to ignore that the buffer can be truly multi-planar. > > > > Perhaps we could define a "safe" version of the function that > > complains if it was called on a truly multi planar buffer? It might > > return zero, or print a warning to the console - would that help? > > Is [0] what you're essentially looking for ? How would you like to > handle bytesused vs. length ? Yes, I think that looks right. I guess we could end up in a situation where the last "plane" (in a single plane buffer) would carry any extra bytes that aren't really needed, but that seems OK. The rule is always that length can be bigger than you really need. But obviously it must always be at least as big as the hardware required to write out that plane (whether part of a single plane, or proper mult plane). That important thing is that it's easy to get back the "true" buffer sizes. > > On a side note, I'm increasingly starting to dislike bytesused, as it > should only differ from length for compressed formats. Having redundant > information in the most common case isn't nice. Sounds right to me. I don't think I ever pay it any attention except when dealing with the h.264 encoder. Thanks! 
David > > [0] https://git.libcamera.org/libcamera/pinchartl/libcamera.git/tree/src/libcamera/v4l2_videodevice.cpp?h=fb/offset&id=f46dffea24149ceaf14e12432db97aed116ac0d4#n1530 > > > > > On Mon, 6 Sept 2021 at 03:01, Laurent Pinchart wrote: > > > > > > > > > > Hello everybody, > > > > > > > > > > This patch series started as an investigation of a qcam failure with > > > > > IPU3 after the merge of the FrameBuffer offset support. While a hack in > > > > > qcam would be possible, I decided to instead address the core issue and > > > > > fix it in V4L2VideoDevice. > > > > > > > > > > Compared to v1, the series now includes fixes for cam and qcam in > > > > > addition to the changes needed in the libcamera core. They have been > > > > > tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > > > > > > > > > > The GStreamer element seems to work fine without any change required. > > > > > The V4L2 compatibility layer is still broken, and I haven't tested the > > > > > Android HAL yet (any volunteer ?). > > > > > > > > > > The most important change is in patches 13/27 and 14/27, which translate > > > > > between V4L2 buffers and libcamera FrameBuffer to handle the case where > > > > > a multi-planar frame buffer is used with the V4L2 single-planar API. > > > > > It's working more or less by chance at the moment (except in qcam where > > > > > it's broken, and possibly in other places I haven't tested). Patches > > > > > 01/27 to 12/27 are cleanups and additions to prepare for the work in > > > > > V4L2VideoDevice, and patch 15/27 is a small cleanup on top. Patches > > > > > 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. > > > > > > > > > > Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an > > > > > issue in the Android HAL. Worth being noted is patch 19/27 that > > > > > introduces an Image class shared by cam and qcam. The class duplicates > > > > > the MappedFrameBuffer implementation private to libcamera. I've tried to > > > > > rework MappedFrameBuffer into something I would be happy to see in the > > > > > public API, but failed to do so in a reasonable amount of time, and I > > > > > didn't want to delay this important regression fix. > > > > > > > > > > This series doesn't break any unit test, as vimc doesn't support NV12. > > > > > Addition of NV12 support to the vimc kernel driver would be very nice, > > > > > in order to test multi-planar support in our unit tests. 
Volunteers are > > > > > welcome ;-) > > > > > > > > > > Laurent Pinchart (27): > > > > > libcamera: base: utils: Use size_t for index in utils::enumerate() > > > > > libcamera: file_descriptor: Add a function to retrieve the inode > > > > > libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() > > > > > libcamera: Use V4L2PixelFormat::fromPixelFormat() > > > > > libcamera: formats: Move plane info structure to PixelFormatInfo > > > > > libcamera: formats: Add planeSize() helpers to PixelFormatInfo > > > > > libcamera: formats: Support V4L2 non-contiguous formats > > > > > libcamera: framebuffer: Move planes check to constructor > > > > > libcamera: framebuffer: Add a function to check if planes are > > > > > contiguous > > > > > libcamera: v4l2_videodevice: Cache PixelFormatInfo > > > > > libcamera: v4l2_videodevice: Document plane handling in createBuffer() > > > > > libcamera: v4l2_videodevice: Take stride into account to compute > > > > > offsets > > > > > libcamera: v4l2_videodevice: Coalesce planes when queuing buffer > > > > > libcamera: v4l2_videodevice: Split planes when dequeuing buffer > > > > > libcamera: v4l2_videodevice: Use utils::enumerate() > > > > > libcamera: framebuffer: Allocate metadata planes at construction time > > > > > libcamera: framebuffer: Prevent modifying the number of metadata > > > > > planes > > > > > android: camera_device: Don't assume all planes use the same fd > > > > > cam: Add Image class > > > > > cam: file_sink: Use Image class to access pixel data > > > > > cam: drm: Support per-plane stride values > > > > > cam: drm: Set per-plane offsets when creating DRM frame buffer > > > > > cam: drm: Avoid importing the same dmabuf multiple times > > > > > qcam: Print bytesused for all planes > > > > > qcam: Use Image class to access pixel data > > > > > qcam: viewfinder_gl: Support multi-planar buffers > > > > > qcam: viewfinder_qt: Support multi-planar buffers > > > > > > > > > > include/libcamera/base/utils.h | 4 +- > > > > > include/libcamera/file_descriptor.h | 3 + > > > > > include/libcamera/framebuffer.h | 19 +- > > > > > include/libcamera/internal/formats.h | 22 +- > > > > > include/libcamera/internal/framebuffer.h | 2 + > > > > > include/libcamera/internal/v4l2_pixelformat.h | 2 +- > > > > > include/libcamera/internal/v4l2_videodevice.h | 3 +- > > > > > src/android/camera_device.cpp | 25 +- > > > > > src/android/mm/generic_camera_buffer.cpp | 11 +- > > > > > src/android/yuv/post_processor_yuv.cpp | 10 +- > > > > > src/cam/camera_session.cpp | 4 +- > > > > > src/cam/drm.cpp | 38 +- > > > > > src/cam/drm.h | 7 +- > > > > > src/cam/file_sink.cpp | 44 +-- > > > > > src/cam/file_sink.h | 6 +- > > > > > src/cam/image.cpp | 107 +++++ > > > > > src/cam/image.h | 52 +++ > > > > > src/cam/kms_sink.cpp | 28 +- > > > > > src/cam/meson.build | 1 + > > > > > src/libcamera/file_descriptor.cpp | 26 ++ > > > > > src/libcamera/formats.cpp | 373 ++++++++++++++---- > > > > > src/libcamera/framebuffer.cpp | 57 ++- > > > > > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- > > > > > src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- > > > > > .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- > > > > > src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- > > > > > src/libcamera/pipeline/simple/converter.cpp | 8 +- > > > > > src/libcamera/pipeline/simple/simple.cpp | 4 +- > > > > > src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- > > > > > src/libcamera/pipeline/vimc/vimc.cpp | 8 +- > > > > > src/libcamera/v4l2_pixelformat.cpp | 11 +- > > > > > src/libcamera/v4l2_videodevice.cpp | 
196 ++++++--- > > > > > src/qcam/format_converter.cpp | 18 +- > > > > > src/qcam/format_converter.h | 9 +- > > > > > src/qcam/main_window.cpp | 38 +- > > > > > src/qcam/main_window.h | 4 +- > > > > > src/qcam/meson.build | 1 + > > > > > src/qcam/viewfinder.h | 6 +- > > > > > src/qcam/viewfinder_gl.cpp | 45 +-- > > > > > src/qcam/viewfinder_gl.h | 4 +- > > > > > src/qcam/viewfinder_qt.cpp | 20 +- > > > > > src/qcam/viewfinder_qt.h | 2 +- > > > > > src/v4l2/v4l2_camera_proxy.cpp | 11 +- > > > > > test/libtest/buffer_source.cpp | 3 +- > > > > > test/utils.cpp | 10 +- > > > > > 45 files changed, 911 insertions(+), 357 deletions(-) > > > > > create mode 100644 src/cam/image.cpp > > > > > create mode 100644 src/cam/image.h > > -- > Regards, > > Laurent Pinchart
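To make the encoder use case above concrete: when the planes are known to be contiguous, an application could queue the buffer to a single-planar V4L2 encoder by passing the first plane's fd and the summed length, which is essentially what the coalescing code in [0] does inside libcamera. A rough sketch follows, assuming a DMABUF output queue; encoderFd, the buffer index and the minimal error handling are placeholders.

#include <cstdio>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

	/* Assumes all planes share one contiguous dmabuf. */
	const std::vector<libcamera::FrameBuffer::Plane> &planes = buffer->planes();

	unsigned int total = 0;
	for (const libcamera::FrameBuffer::Plane &plane : planes)
		total += plane.length;

	struct v4l2_buffer buf = {};
	buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;	/* single-planar queue */
	buf.memory = V4L2_MEMORY_DMABUF;
	buf.index = 0;				/* placeholder */
	buf.m.fd = planes[0].fd.fd();
	buf.bytesused = total;
	buf.length = total;

	if (ioctl(encoderFd, VIDIOC_QBUF, &buf) < 0)
		perror("VIDIOC_QBUF");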
Hi David, On Mon, Sep 06, 2021 at 06:22:10PM +0100, David Plowman wrote: > On Mon, 6 Sept 2021 at 15:16, Laurent Pinchart wrote: > > On Mon, Sep 06, 2021 at 02:13:58PM +0100, David Plowman wrote: > > > On Mon, 6 Sept 2021 at 13:14, Laurent Pinchart wrote: > > > > On Mon, Sep 06, 2021 at 12:22:25PM +0100, David Plowman wrote: > > > > > Hi Laurent, everyone > > > > > > > > > > Thanks for all the efforts to get this working! I had just a little > > > > > question or two... > > > > > > > > > > 1. Is it easy to tell if a FrameBuffer is actually single plane or > > > > > multi plane? If not, could we add a public API function that would > > > > > tell us? > > > > > > > > You can use FrameBuffer::planes().size() to get the number of planes. Or > > > > did you mean checking if the different planes in a multi-planar frame > > > > buffer are stored contiguously in the same dmabuf ? There's a private > > > > helper for that (FrameBuffer::Private::isContiguous()), I haven't made > > > > it public yet as I wanted to evaluate the use cases first. > > > > > > Yes, isContiguous() sounds pretty much like what I want. I'm > > > interested in functions that make it more convenient for me to know > > > how I pass a buffer (for example) to my V4L2 h.264 encoder. > > > > I wish the V4L2 multi-planar support had been better designed. It's > > painful :-S > > > > > > > 2. Is it easy to get the full size of the buffer for the single plane > > > > > case (rather than having to add all the bits up)? And again, if the > > > > > answer is no, could we add such a thing? > > > > > > > > > > I'm thinking of trying to make life easy for applications that might > > > > > want to pass these buffers to codecs where the driver might only > > > > > support single planes. Not thinking of any platform in particular... > > > > > :) > > > > > > > > It again depends what you mean :-) If the FrameBuffer has a single > > > > plane, FrameBuffer::planes()[0].length (and > > > > FrameMetaData::planes()[0].bytesused) will give you what you need. I > > > > suspect you're however consider the case of a multi-planar FrameBuffer > > > > with planes stored contiguously in memory, using the single-planar V4L2 > > > > formats (e.g. V4L2_PIX_FMT_NV12, as opposed to V4L2_PIX_FMT_NV12M). I'm > > > > a bit worried that a helper function in that case would be used by > > > > applications to ignore that the buffer can be truly multi-planar. > > > > > > Perhaps we could define a "safe" version of the function that > > > complains if it was called on a truly multi planar buffer? It might > > > return zero, or print a warning to the console - would that help? > > > > Is [0] what you're essentially looking for ? How would you like to > > handle bytesused vs. length ? > > Yes, I think that looks right. I guess we could end up in a situation > where the last "plane" (in a single plane buffer) would carry any > extra bytes that aren't really needed, but that seems OK. The rule is > always that length can be bigger than you really need. But obviously > it must always be at least as big as the hardware required to write > out that plane (whether part of a single plane, or proper mult plane). > That important thing is that it's easy to get back the "true" buffer > sizes. > > > On a side note, I'm increasingly starting to dislike bytesused, as it > > should only differ from length for compressed formats. Having redundant > > information in the most common case isn't nice. > > Sounds right to me. 
I don't think I ever pay it any attention except > when dealing with the h.264 encoder. By the way, is the hardware for the ISP and the H.264 encoder restricted to contiguous NV12 planes, or does it support disjoint planes ? > > [0] https://git.libcamera.org/libcamera/pinchartl/libcamera.git/tree/src/libcamera/v4l2_videodevice.cpp?h=fb/offset&id=f46dffea24149ceaf14e12432db97aed116ac0d4#n1530 > > > > > > > On Mon, 6 Sept 2021 at 03:01, Laurent Pinchart wrote: > > > > > > > > > > > > Hello everybody, > > > > > > > > > > > > This patch series started as an investigation of a qcam failure with > > > > > > IPU3 after the merge of the FrameBuffer offset support. While a hack in > > > > > > qcam would be possible, I decided to instead address the core issue and > > > > > > fix it in V4L2VideoDevice. > > > > > > > > > > > > Compared to v1, the series now includes fixes for cam and qcam in > > > > > > addition to the changes needed in the libcamera core. They have been > > > > > > tested with the Raspberry Pi, IPU3, VIMC and UVC pipeline handlers. > > > > > > > > > > > > The GStreamer element seems to work fine without any change required. > > > > > > The V4L2 compatibility layer is still broken, and I haven't tested the > > > > > > Android HAL yet (any volunteer ?). > > > > > > > > > > > > The most important change is in patches 13/27 and 14/27, which translate > > > > > > between V4L2 buffers and libcamera FrameBuffer to handle the case where > > > > > > a multi-planar frame buffer is used with the V4L2 single-planar API. > > > > > > It's working more or less by chance at the moment (except in qcam where > > > > > > it's broken, and possibly in other places I haven't tested). Patches > > > > > > 01/27 to 12/27 are cleanups and additions to prepare for the work in > > > > > > V4L2VideoDevice, and patch 15/27 is a small cleanup on top. Patches > > > > > > 16/27 and 17/27 then improve the FrameBuffer class API as a cleanup. > > > > > > > > > > > > Patches 18/27 to 27/27 fix the cam and qcam applications, as well as an > > > > > > issue in the Android HAL. Worth being noted is patch 19/27 that > > > > > > introduces an Image class shared by cam and qcam. The class duplicates > > > > > > the MappedFrameBuffer implementation private to libcamera. I've tried to > > > > > > rework MappedFrameBuffer into something I would be happy to see in the > > > > > > public API, but failed to do so in a reasonable amount of time, and I > > > > > > didn't want to delay this important regression fix. > > > > > > > > > > > > This series doesn't break any unit test, as vimc doesn't support NV12. > > > > > > Addition of NV12 support to the vimc kernel driver would be very nice, > > > > > > in order to test multi-planar support in our unit tests. 
Volunteers are > > > > > > welcome ;-) > > > > > > > > > > > > Laurent Pinchart (27): > > > > > > libcamera: base: utils: Use size_t for index in utils::enumerate() > > > > > > libcamera: file_descriptor: Add a function to retrieve the inode > > > > > > libcamera: v4l2_videodevice: Drop toV4L2PixelFormat() > > > > > > libcamera: Use V4L2PixelFormat::fromPixelFormat() > > > > > > libcamera: formats: Move plane info structure to PixelFormatInfo > > > > > > libcamera: formats: Add planeSize() helpers to PixelFormatInfo > > > > > > libcamera: formats: Support V4L2 non-contiguous formats > > > > > > libcamera: framebuffer: Move planes check to constructor > > > > > > libcamera: framebuffer: Add a function to check if planes are > > > > > > contiguous > > > > > > libcamera: v4l2_videodevice: Cache PixelFormatInfo > > > > > > libcamera: v4l2_videodevice: Document plane handling in createBuffer() > > > > > > libcamera: v4l2_videodevice: Take stride into account to compute > > > > > > offsets > > > > > > libcamera: v4l2_videodevice: Coalesce planes when queuing buffer > > > > > > libcamera: v4l2_videodevice: Split planes when dequeuing buffer > > > > > > libcamera: v4l2_videodevice: Use utils::enumerate() > > > > > > libcamera: framebuffer: Allocate metadata planes at construction time > > > > > > libcamera: framebuffer: Prevent modifying the number of metadata > > > > > > planes > > > > > > android: camera_device: Don't assume all planes use the same fd > > > > > > cam: Add Image class > > > > > > cam: file_sink: Use Image class to access pixel data > > > > > > cam: drm: Support per-plane stride values > > > > > > cam: drm: Set per-plane offsets when creating DRM frame buffer > > > > > > cam: drm: Avoid importing the same dmabuf multiple times > > > > > > qcam: Print bytesused for all planes > > > > > > qcam: Use Image class to access pixel data > > > > > > qcam: viewfinder_gl: Support multi-planar buffers > > > > > > qcam: viewfinder_qt: Support multi-planar buffers > > > > > > > > > > > > include/libcamera/base/utils.h | 4 +- > > > > > > include/libcamera/file_descriptor.h | 3 + > > > > > > include/libcamera/framebuffer.h | 19 +- > > > > > > include/libcamera/internal/formats.h | 22 +- > > > > > > include/libcamera/internal/framebuffer.h | 2 + > > > > > > include/libcamera/internal/v4l2_pixelformat.h | 2 +- > > > > > > include/libcamera/internal/v4l2_videodevice.h | 3 +- > > > > > > src/android/camera_device.cpp | 25 +- > > > > > > src/android/mm/generic_camera_buffer.cpp | 11 +- > > > > > > src/android/yuv/post_processor_yuv.cpp | 10 +- > > > > > > src/cam/camera_session.cpp | 4 +- > > > > > > src/cam/drm.cpp | 38 +- > > > > > > src/cam/drm.h | 7 +- > > > > > > src/cam/file_sink.cpp | 44 +-- > > > > > > src/cam/file_sink.h | 6 +- > > > > > > src/cam/image.cpp | 107 +++++ > > > > > > src/cam/image.h | 52 +++ > > > > > > src/cam/kms_sink.cpp | 28 +- > > > > > > src/cam/meson.build | 1 + > > > > > > src/libcamera/file_descriptor.cpp | 26 ++ > > > > > > src/libcamera/formats.cpp | 373 ++++++++++++++---- > > > > > > src/libcamera/framebuffer.cpp | 57 ++- > > > > > > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +- > > > > > > src/libcamera/pipeline/ipu3/imgu.cpp | 2 +- > > > > > > .../pipeline/raspberrypi/raspberrypi.cpp | 8 +- > > > > > > src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 6 +- > > > > > > src/libcamera/pipeline/simple/converter.cpp | 8 +- > > > > > > src/libcamera/pipeline/simple/simple.cpp | 4 +- > > > > > > src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 6 +- > > > > > > 
src/libcamera/pipeline/vimc/vimc.cpp | 8 +- > > > > > > src/libcamera/v4l2_pixelformat.cpp | 11 +- > > > > > > src/libcamera/v4l2_videodevice.cpp | 196 ++++++--- > > > > > > src/qcam/format_converter.cpp | 18 +- > > > > > > src/qcam/format_converter.h | 9 +- > > > > > > src/qcam/main_window.cpp | 38 +- > > > > > > src/qcam/main_window.h | 4 +- > > > > > > src/qcam/meson.build | 1 + > > > > > > src/qcam/viewfinder.h | 6 +- > > > > > > src/qcam/viewfinder_gl.cpp | 45 +-- > > > > > > src/qcam/viewfinder_gl.h | 4 +- > > > > > > src/qcam/viewfinder_qt.cpp | 20 +- > > > > > > src/qcam/viewfinder_qt.h | 2 +- > > > > > > src/v4l2/v4l2_camera_proxy.cpp | 11 +- > > > > > > test/libtest/buffer_source.cpp | 3 +- > > > > > > test/utils.cpp | 10 +- > > > > > > 45 files changed, 911 insertions(+), 357 deletions(-) > > > > > > create mode 100644 src/cam/image.cpp
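For completeness, the opposite direction handled by patch 14/27 in the list above ("Split planes when dequeuing buffer") has to spread a single-planar bytesused value back over the frame buffer's planes. A rough sketch of the idea is shown below, reusing names from the queueBuffer() patch earlier in the thread (utils::enumerate(), metadata.planes); how metadata is obtained is omitted, std::min comes from <algorithm>, and the real patch may distribute the bytes differently.

	/*
	 * Spread the bytesused value of a dequeued single-planar V4L2
	 * buffer over the frame buffer planes: every plane but the last is
	 * assumed to be fully used, the remainder goes to the last plane.
	 */
	unsigned int bytesused = buf.bytesused;

	for (auto [i, plane] : utils::enumerate(buffer->planes())) {
		const bool last = i == buffer->planes().size() - 1;
		unsigned int used = last
				  ? bytesused
				  : std::min<unsigned int>(bytesused, plane.length);

		metadata.planes[i].bytesused = used;
		bytesused -= used;
	}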
Hi Laurent, On Mon, Sep 06, 2021 at 05:00:37AM +0300, Laurent Pinchart wrote: > Replace manual looked for V4L2 pixel format in the PixelFormatInfo with > the V4L2PixelFormat::fromPixelFormat() helper function. This prepares > for multi-planar support that will modify how V4L2 pixel formats are > stored in PixelFormatInfo. > > Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Paul Elder <paul.elder@ideasonboard.com> > --- > src/libcamera/pipeline/ipu3/cio2.cpp | 4 +--- > src/v4l2/v4l2_camera_proxy.cpp | 9 +++------ > 2 files changed, 4 insertions(+), 9 deletions(-) > > diff --git a/src/libcamera/pipeline/ipu3/cio2.cpp b/src/libcamera/pipeline/ipu3/cio2.cpp > index 9cedcb5b2879..dc62ab197acb 100644 > --- a/src/libcamera/pipeline/ipu3/cio2.cpp > +++ b/src/libcamera/pipeline/ipu3/cio2.cpp > @@ -203,9 +203,7 @@ int CIO2Device::configure(const Size &size, V4L2DeviceFormat *outputFormat) > if (itInfo == mbusCodesToPixelFormat.end()) > return -EINVAL; > > - const PixelFormatInfo &info = PixelFormatInfo::info(itInfo->second); > - > - outputFormat->fourcc = info.v4l2Format; > + outputFormat->fourcc = V4L2PixelFormat::fromPixelFormat(itInfo->second); > outputFormat->size = sensorFormat.size; > outputFormat->planesCount = 1; > > diff --git a/src/v4l2/v4l2_camera_proxy.cpp b/src/v4l2/v4l2_camera_proxy.cpp > index 07b1a90aa32f..d926a7b77083 100644 > --- a/src/v4l2/v4l2_camera_proxy.cpp > +++ b/src/v4l2/v4l2_camera_proxy.cpp > @@ -164,12 +164,11 @@ bool V4L2CameraProxy::validateMemoryType(uint32_t memory) > > void V4L2CameraProxy::setFmtFromConfig(const StreamConfiguration &streamConfig) > { > - const PixelFormatInfo &info = PixelFormatInfo::info(streamConfig.pixelFormat); > const Size &size = streamConfig.size; > > v4l2PixFormat_.width = size.width; > v4l2PixFormat_.height = size.height; > - v4l2PixFormat_.pixelformat = info.v4l2Format; > + v4l2PixFormat_.pixelformat = V4L2PixelFormat::fromPixelFormat(streamConfig.pixelFormat); > v4l2PixFormat_.field = V4L2_FIELD_NONE; > v4l2PixFormat_.bytesperline = streamConfig.stride; > v4l2PixFormat_.sizeimage = streamConfig.frameSize; > @@ -276,7 +275,7 @@ int V4L2CameraProxy::vidioc_enum_fmt(V4L2CameraFile *file, struct v4l2_fmtdesc * > /* \todo Add map from format to description. */ > utils::strlcpy(reinterpret_cast<char *>(arg->description), > "Video Format Description", sizeof(arg->description)); > - arg->pixelformat = PixelFormatInfo::info(format).v4l2Format; > + arg->pixelformat = V4L2PixelFormat::fromPixelFormat(format); > > memset(arg->reserved, 0, sizeof(arg->reserved)); > > @@ -311,11 +310,9 @@ int V4L2CameraProxy::tryFormat(struct v4l2_format *arg) > return -EINVAL; > } > > - const PixelFormatInfo &info = PixelFormatInfo::info(config.pixelFormat); > - > arg->fmt.pix.width = config.size.width; > arg->fmt.pix.height = config.size.height; > - arg->fmt.pix.pixelformat = info.v4l2Format; > + arg->fmt.pix.pixelformat = V4L2PixelFormat::fromPixelFormat(config.pixelFormat); > arg->fmt.pix.field = V4L2_FIELD_NONE; > arg->fmt.pix.bytesperline = config.stride; > arg->fmt.pix.sizeimage = config.frameSize; > -- > Regards, > > Laurent Pinchart >