@@ -882,14 +882,12 @@ As well as a list of supported StreamFormats, the StreamConfiguration is also
expected to provide an initialised default configuration. This may be arbitrary,
but depending on use case you may wish to select an output that matches the
Sensor output, or prefer a pixelformat which might provide higher performance on
-the hardware. The bufferCount represents the number of buffers required to
-support functional continuous processing on this stream.
+the hardware.
.. code-block:: cpp
cfg.pixelFormat = formats::BGR888;
cfg.size = { 1280, 720 };
- cfg.bufferCount = 4;
Finally add each ``StreamConfiguration`` generated to the
``CameraConfiguration``, and ensure that it has been validated before returning
@@ -955,8 +953,6 @@ Add the following function implementation to your file:
status = Adjusted;
}
- cfg.bufferCount = 4;
-
return status;
}
@@ -1200,13 +1196,20 @@ is performed by using the ``V4L2VideoDevice`` API, which provides an
.. _FrameBuffer: https://libcamera.org/api-html/classlibcamera_1_1FrameBuffer.html
+The number passed to ``importBuffers()`` should be at least equal to the value
+of the ``MinimumRequests`` property, so that enough buffers can be queued to
+the video device to avoid dropping frames during capture. A larger value can
+be advantageous as it reduces the thrashing of dma-buf file descriptor
+mappings when the application queues more requests, thereby improving
+performance, but for simplicity we'll just use ``MinimumRequests``.
+
Implement the pipeline handler ``start()`` function by replacing the stub
version with the following code:
.. code-block:: c++
VividCameraData *data = cameraData(camera);
- unsigned int count = data->stream_.configuration().bufferCount;
+ unsigned int count = camera->properties().get(properties::MinimumRequests);
int ret = data->video_->importBuffers(count);
if (ret < 0)
@@ -106,9 +106,6 @@ private:
const Stream *stream_;
std::unique_ptr<V4L2M2MDevice> m2m_;
- unsigned int inputBufferCount_;
- unsigned int outputBufferCount_;
-
std::pair<Rectangle, Rectangle> inputCropBounds_;
};
@@ -46,8 +46,6 @@ struct StreamConfiguration {
unsigned int stride;
unsigned int frameSize;
- unsigned int bufferCount;
-
std::optional<ColorSpace> colorSpace;
Stream *stream() const { return stream_; }
@@ -14,6 +14,7 @@
#include <unistd.h>
#include <libcamera/formats.h>
+#include <libcamera/property_ids.h>
#include "jpeg/post_processor_jpeg.h"
#include "yuv/post_processor_yuv.h"
@@ -131,7 +132,9 @@ int CameraStream::configure()
allocator_ = std::make_unique<PlatformFrameBufferAllocator>(cameraDevice_);
mutex_ = std::make_unique<Mutex>();
- camera3Stream_->max_buffers = configuration().bufferCount;
+ unsigned int bufferCount =
+ cameraDevice_->camera()->properties().get(properties::MinimumRequests).value();
+ camera3Stream_->max_buffers = bufferCount;
return 0;
}
@@ -33,20 +33,6 @@ void Capture::configure(libcamera::Span<const libcamera::StreamRole> roles)
ASSERT_EQ(config_->size(), roles.size()) << "Unexpected number of streams in configuration";
- /*
- * Set the buffers count to the largest value across all streams.
- * \todo: Should all streams from a Camera have the same buffer count ?
- */
- auto largest =
- std::max_element(config_->begin(), config_->end(),
- [](const StreamConfiguration &l, const StreamConfiguration &r)
- { return l.bufferCount < r.bufferCount; });
-
- assert(largest != config_->end());
-
- for (auto &cfg : *config_)
- cfg.bufferCount = largest->bufferCount;
-
if (config_->validate() != CameraConfiguration::Valid) {
config_.reset();
FAIL() << "Configuration not valid";
@@ -141,9 +141,6 @@ int V4L2M2MConverter::V4L2M2MStream::configure(const StreamConfiguration &inputC
return -EINVAL;
}
- inputBufferCount_ = inputCfg.bufferCount;
- outputBufferCount_ = outputCfg.bufferCount;
-
if (converter_->features() & Feature::InputCrop) {
ret = getCropBounds(m2m_->output(), inputCropBounds_.first,
inputCropBounds_.second);
@@ -666,7 +666,6 @@ StreamConfiguration PipelineHandlerISI::generateYUVConfiguration(Camera *camera,
StreamConfiguration cfg(formats);
cfg.pixelFormat = pixelFormat;
cfg.size = sensorSize;
- cfg.bufferCount = 4;
return cfg;
}
@@ -734,7 +733,6 @@ StreamConfiguration PipelineHandlerISI::generateRawConfiguration(Camera *camera)
StreamConfiguration cfg(formats);
cfg.size = sensor->resolution();
cfg.pixelFormat = pixelFormat;
- cfg.bufferCount = 4;
return cfg;
}
@@ -97,7 +97,6 @@ private:
class IPU3CameraConfiguration : public CameraConfiguration
{
public:
- static constexpr unsigned int kBufferCount = 4;
static constexpr unsigned int kMaxStreams = 3;
IPU3CameraConfiguration(IPU3CameraData *data);
@@ -293,7 +292,6 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
/* Initialize the RAW stream with the CIO2 configuration. */
cfg->size = cio2Configuration_.size;
cfg->pixelFormat = cio2Configuration_.pixelFormat;
- cfg->bufferCount = cio2Configuration_.bufferCount;
cfg->stride = info.stride(cfg->size.width, 0, 64);
cfg->frameSize = info.frameSize(cfg->size, 64);
cfg->setStream(const_cast<Stream *>(&data_->rawStream_));
@@ -337,7 +335,6 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
ImgUDevice::kOutputAlignHeight);
cfg->pixelFormat = formats::NV12;
- cfg->bufferCount = kBufferCount;
cfg->stride = info.stride(cfg->size.width, 0, 1);
cfg->frameSize = info.frameSize(cfg->size, 1);
@@ -406,7 +403,6 @@ PipelineHandlerIPU3::generateConfiguration(Camera *camera, Span<const StreamRole
Size sensorResolution = data->cio2_.sensor()->resolution();
for (const StreamRole role : roles) {
std::map<PixelFormat, std::vector<SizeRange>> streamFormats;
- unsigned int bufferCount;
PixelFormat pixelFormat;
Size size;
@@ -426,7 +422,6 @@ PipelineHandlerIPU3::generateConfiguration(Camera *camera, Span<const StreamRole
.alignedDownTo(ImgUDevice::kOutputMarginWidth,
ImgUDevice::kOutputMarginHeight);
pixelFormat = formats::NV12;
- bufferCount = IPU3CameraConfiguration::kBufferCount;
streamFormats[pixelFormat] = { { ImgUDevice::kOutputMinSize, size } };
break;
@@ -436,7 +431,6 @@ PipelineHandlerIPU3::generateConfiguration(Camera *camera, Span<const StreamRole
data->cio2_.generateConfiguration(sensorResolution);
pixelFormat = cio2Config.pixelFormat;
size = cio2Config.size;
- bufferCount = cio2Config.bufferCount;
for (const PixelFormat &format : data->cio2_.formats())
streamFormats[format] = data->cio2_.sizes(format);
@@ -455,7 +449,6 @@ PipelineHandlerIPU3::generateConfiguration(Camera *camera, Span<const StreamRole
.alignedDownTo(ImgUDevice::kOutputAlignWidth,
ImgUDevice::kOutputAlignHeight);
pixelFormat = formats::NV12;
- bufferCount = IPU3CameraConfiguration::kBufferCount;
streamFormats[pixelFormat] = { { ImgUDevice::kOutputMinSize, size } };
break;
@@ -471,7 +464,6 @@ PipelineHandlerIPU3::generateConfiguration(Camera *camera, Span<const StreamRole
StreamConfiguration cfg(formats);
cfg.size = size;
cfg.pixelFormat = pixelFormat;
- cfg.bufferCount = bufferCount;
config->addConfiguration(cfg);
}
@@ -818,7 +818,6 @@ PipelineHandlerMaliC55::generateConfiguration(Camera *camera,
StreamFormats streamFormats(formats);
StreamConfiguration cfg(streamFormats);
cfg.pixelFormat = pixelFormat;
- cfg.bufferCount = 4;
cfg.size = size;
config->addConfiguration(cfg);
@@ -249,7 +249,6 @@ RkISP1Path::generateConfiguration(const CameraSensor *sensor, const Size &size,
StreamConfiguration cfg(formats);
cfg.pixelFormat = format;
cfg.size = streamSize;
- cfg.bufferCount = RKISP1_BUFFER_COUNT;
return cfg;
}
@@ -383,7 +382,6 @@ RkISP1Path::validate(const CameraSensor *sensor,
cfg->size.boundTo(maxResolution);
cfg->size.expandTo(minResolution);
- cfg->bufferCount = RKISP1_BUFFER_COUNT;
V4L2DeviceFormat format;
format.fourcc = video_->toV4L2PixelFormat(cfg->pixelFormat);
@@ -69,8 +69,6 @@ private:
void populateFormats();
Size filterSensorResolution(const CameraSensor *sensor);
- static constexpr unsigned int RKISP1_BUFFER_COUNT = 4;
-
const char *name_;
bool running_;
@@ -421,7 +421,6 @@ protected:
int queueRequestDevice(Camera *camera, Request *request) override;
private:
- static constexpr unsigned int kNumInternalBuffers = 3;
static constexpr unsigned int kSimpleBufferSlotCount = 16;
struct EntityData {
@@ -1239,7 +1238,7 @@ CameraConfiguration::Status SimpleCameraConfiguration::validate()
cfg.size != pipeConfig_->captureSize)
needConversion_ = true;
- /* Set the stride, frameSize and bufferCount. */
+ /* Set the stride and frameSize. */
if (needConversion_) {
std::tie(cfg.stride, cfg.frameSize) =
data_->converter_
@@ -1261,8 +1260,6 @@ CameraConfiguration::Status SimpleCameraConfiguration::validate()
cfg.stride = format.planes[0].bpl;
cfg.frameSize = format.planes[0].size;
}
-
- cfg.bufferCount = 4;
}
return status;
@@ -1407,7 +1404,6 @@ int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
inputCfg.pixelFormat = pipeConfig->captureFormat;
inputCfg.size = pipeConfig->captureSize;
inputCfg.stride = captureFormat.planes[0].bpl;
- inputCfg.bufferCount = kNumInternalBuffers;
if (data->converter_) {
/*
@@ -186,8 +186,6 @@ CameraConfiguration::Status UVCCameraConfiguration::validate()
status = Adjusted;
}
- cfg.bufferCount = 4;
-
V4L2DeviceFormat format;
format.fourcc = data_->video_->toV4L2PixelFormat(cfg.pixelFormat);
format.size = cfg.size;
@@ -248,7 +246,6 @@ PipelineHandlerUVC::generateConfiguration(Camera *camera,
cfg.pixelFormat = formats.pixelformats().front();
cfg.size = formats.sizes(cfg.pixelFormat).back();
- cfg.bufferCount = 4;
config->addConfiguration(cfg);
@@ -183,8 +183,6 @@ CameraConfiguration::Status VimcCameraConfiguration::validate()
status = Adjusted;
}
- cfg.bufferCount = 4;
-
V4L2DeviceFormat format;
format.fourcc = data_->video_->toV4L2PixelFormat(cfg.pixelFormat);
format.size = cfg.size;
@@ -244,7 +242,6 @@ PipelineHandlerVimc::generateConfiguration(Camera *camera,
cfg.pixelFormat = formats::BGR888;
cfg.size = { 1920, 1080 };
- cfg.bufferCount = 4;
config->addConfiguration(cfg);
@@ -67,8 +67,6 @@ overloaded(Ts...) -> overloaded<Ts...>;
class VirtualCameraConfiguration : public CameraConfiguration
{
public:
- static constexpr unsigned int kBufferCount = 4;
-
VirtualCameraConfiguration(VirtualCameraData *data);
Status validate() override;
@@ -188,8 +186,6 @@ CameraConfiguration::Status VirtualCameraConfiguration::validate()
const PixelFormatInfo &info = PixelFormatInfo::info(cfg.pixelFormat);
cfg.stride = info.stride(cfg.size.width, 0, 1);
cfg.frameSize = info.frameSize(cfg.size, 1);
-
- cfg.bufferCount = VirtualCameraConfiguration::kBufferCount;
}
return status;
@@ -244,7 +240,6 @@ PipelineHandlerVirtual::generateConfiguration(Camera *camera,
StreamConfiguration cfg(formats);
cfg.pixelFormat = pixelFormat;
cfg.size = data->config_.maxResolutionSize;
- cfg.bufferCount = VirtualCameraConfiguration::kBufferCount;
config->addConfiguration(cfg);
}
@@ -280,8 +280,7 @@ SizeRange StreamFormats::range(const PixelFormat &pixelformat) const
* handlers provide StreamFormats.
*/
StreamConfiguration::StreamConfiguration()
- : pixelFormat(0), stride(0), frameSize(0), bufferCount(0),
- stream_(nullptr)
+ : pixelFormat(0), stride(0), frameSize(0), stream_(nullptr)
{
}
@@ -289,8 +288,8 @@ StreamConfiguration::StreamConfiguration()
* \brief Construct a configuration with stream formats
*/
StreamConfiguration::StreamConfiguration(const StreamFormats &formats)
- : pixelFormat(0), stride(0), frameSize(0), bufferCount(0),
- stream_(nullptr), formats_(formats)
+ : pixelFormat(0), stride(0), frameSize(0), stream_(nullptr),
+ formats_(formats)
{
}
@@ -325,11 +324,6 @@ StreamConfiguration::StreamConfiguration(const StreamFormats &formats)
* validating the configuration with a call to CameraConfiguration::validate().
*/
-/**
- * \var StreamConfiguration::bufferCount
- * \brief Requested number of buffers to allocate for the stream
- */
-
/**
* \var StreamConfiguration::colorSpace
* \brief The ColorSpace for this stream
@@ -337,7 +337,6 @@ PYBIND11_MODULE(_libcamera, m)
.def_readwrite("pixel_format", &StreamConfiguration::pixelFormat)
.def_readwrite("stride", &StreamConfiguration::stride)
.def_readwrite("frame_size", &StreamConfiguration::frameSize)
- .def_readwrite("buffer_count", &StreamConfiguration::bufferCount)
.def_property_readonly("formats", &StreamConfiguration::formats,
py::return_value_policy::reference_internal)
.def_readwrite("color_space", &StreamConfiguration::colorSpace);
@@ -16,6 +16,8 @@
#include <libcamera/base/thread.h>
#include <libcamera/base/timer.h>
+#include <libcamera/property_ids.h>
+
#include "libcamera/internal/device_enumerator.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/v4l2_videodevice.h"
@@ -97,10 +99,13 @@ protected:
return TestFail;
}
+ unsigned int bufferCount =
+ camera_->properties().get(properties::MinimumRequests).value();
+
Stream *stream = cfg.stream();
BufferSource source;
- int ret = source.allocate(cfg);
+ int ret = source.allocate(cfg, bufferCount);
if (ret != TestPass)
return ret;
@@ -137,7 +142,7 @@ protected:
}
}
- const unsigned int nFrames = cfg.bufferCount * 2;
+ const unsigned int nFrames = bufferCount * 2;
Timer timer;
timer.start(500ms * nFrames);
@@ -26,7 +26,7 @@ BufferSource::~BufferSource()
media_->release();
}
-int BufferSource::allocate(const StreamConfiguration &config)
+int BufferSource::allocate(const StreamConfiguration &config, unsigned int count)
{
/* Locate and open the video device. */
std::string videoDeviceName = "vivid-000-vid-out";
@@ -78,7 +78,7 @@ int BufferSource::allocate(const StreamConfiguration &config)
return TestFail;
}
- if (video->allocateBuffers(config.bufferCount, &buffers_) < 0) {
+ if (video->allocateBuffers(count, &buffers_) < 0) {
std::cout << "Failed to allocate buffers" << std::endl;
return TestFail;
}
@@ -18,7 +18,7 @@ public:
BufferSource();
~BufferSource();
- int allocate(const libcamera::StreamConfiguration &config);
+ int allocate(const libcamera::StreamConfiguration &config, unsigned int count);
const std::vector<std::unique_ptr<libcamera::FrameBuffer>> &buffers();
private:
@@ -174,10 +174,9 @@ public:
StreamConfiguration cfg;
cfg.pixelFormat = formats::YUYV;
cfg.size = Size(600, 800);
- cfg.bufferCount = numBuffers;
BufferSource source;
- int ret = source.allocate(cfg);
+ int ret = source.allocate(cfg, numBuffers);
if (ret != TestPass)
return ret;