@@ -6,10 +6,12 @@
*/
#include <algorithm>
+#include <cstdio>
#include <iomanip>
#include <memory>
#include <vector>
+#include <linux/intel-ipu3.h>
#include <linux/media-bus-format.h>
#include <libcamera/camera.h>
@@ -33,6 +35,7 @@ class ImgUDevice
{
public:
static constexpr unsigned int PAD_INPUT = 0;
+ static constexpr unsigned int PAD_PARAMS = 1;
static constexpr unsigned int PAD_OUTPUT = 2;
static constexpr unsigned int PAD_VF = 3;
static constexpr unsigned int PAD_STAT = 4;
@@ -89,11 +92,15 @@ public:
ImgUOutput output_;
ImgUOutput viewfinder_;
ImgUOutput stat_;
- /* \todo Add param video device for 3A tuning */
+ V4L2Device *params_;
BufferPool vfPool_;
BufferPool statPool_;
+ BufferPool paramsPool_;
BufferPool outPool_;
+
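+ /* Stat and params buffers available for queueing to the ImgU nodes. */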
+ std::vector<Buffer *> statBuffers;
+ std::vector<Buffer *> paramsBuffers;
};
class CIO2Device
@@ -154,6 +161,8 @@ public:
}
void imguOutputBufferReady(Buffer *buffer);
+ void statBufferReady(Buffer *buffer);
+ void paramsBufferReady(Buffer *buffer);
void imguInputBufferReady(Buffer *buffer);
void cio2BufferReady(Buffer *buffer);
@@ -549,16 +558,49 @@ int PipelineHandlerIPU3::configure(Camera *camera, CameraConfiguration *c)
}
/*
- * Apply the largest available format to the stat node.
- * \todo Revise this when we'll actually use the stat node.
+ * Configure the statistics and parameters video nodes and the
+ * corresponding ImgU subdevice pads.
+ *
+ * \todo The Intel IPU3 Camera HAL computes the buffer size with
+ * getpagesize(), rounding the structure size up to a page boundary:
+ *
+ * unsigned int page_size = getpagesize();
+ * unsigned int datasize = sizeof(struct ipu3_uapi_stats_3a) + page_size
+ * - (sizeof(struct ipu3_uapi_stats_3a) % page_size);
+ *
+ * Find out why it does so, as the V4L2 specification states that
+ * v4l2_format.fmt.meta.buffersize is set by the driver.
*/
- StreamConfiguration statCfg = {};
- statCfg.size = cio2Format.size;
- ret = imgu->configureOutput(&imgu->stat_, statCfg);
+ V4L2SubdeviceFormat metaSubdevFormat = {};
+ V4L2DeviceFormat metaDeviceFormat = {};
+
+ metaSubdevFormat.mbus_code = V4L2_META_FMT_IPU3_STAT_3A;
+ ret = imgu->imgu_->setFormat(ImgUDevice::PAD_STAT, &metaSubdevFormat);
if (ret)
return ret;
+ metaDeviceFormat.fourcc = V4L2_META_FMT_IPU3_STAT_3A;
+ ret = imgu->stat_.dev->setFormat(&metaDeviceFormat);
+ if (ret)
+ return ret;
+
+ LOG(IPU3, Debug) << "ImgU stat format: " << metaDeviceFormat.toString();
+
+ metaSubdevFormat = {};
+ metaDeviceFormat = {};
+
+ metaSubdevFormat.mbus_code = V4L2_META_FMT_IPU3_PARAMS;
+ ret = imgu->imgu_->setFormat(ImgUDevice::PAD_PARAMS, &metaSubdevFormat);
+ if (ret)
+ return ret;
+
+ metaDeviceFormat.fourcc = V4L2_META_FMT_IPU3_PARAMS;
+ ret = imgu->params_->setFormat(&metaDeviceFormat);
+ if (ret)
+ return ret;
+
+ LOG(IPU3, Debug) << "ImgU params format: " << metaDeviceFormat.toString();
+
return 0;
}
@@ -601,6 +643,21 @@ int PipelineHandlerIPU3::allocateBuffers(Camera *camera,
if (ret)
goto error;
+ /* Reserve the same number of buffers in the params node. */
+ imgu->paramsPool_.createBuffers(bufferCount);
+ ret = imgu->params_->exportBuffers(&imgu->paramsPool_);
+ if (ret)
+ goto error;
+
+ /*
+ * Store the available stat and param buffers in a vector and
+ * queue them at the opportune time.
+ */
+ for (Buffer &b : imgu->stat_.pool->buffers())
+ imgu->statBuffers.push_back(&b);
+ for (Buffer &b : imgu->paramsPool_.buffers())
+ imgu->paramsBuffers.push_back(&b);
+
/* Allocate buffers for each active stream. */
for (Stream *s : streams) {
IPU3Stream *stream = static_cast<IPU3Stream *>(s);
@@ -858,6 +915,10 @@ int PipelineHandlerIPU3::registerCameras()
&IPU3CameraData::imguOutputBufferReady);
data->imgu_->viewfinder_.dev->bufferReady.connect(data.get(),
&IPU3CameraData::imguOutputBufferReady);
+ data->imgu_->stat_.dev->bufferReady.connect(data.get(),
+ &IPU3CameraData::statBufferReady);
+ data->imgu_->params_->bufferReady.connect(data.get(),
+ &IPU3CameraData::paramsBufferReady);
/* Create and register the Camera instance. */
std::string cameraName = cio2->sensor_->entity()->name() + " "
@@ -919,15 +980,124 @@ void IPU3CameraData::imguOutputBufferReady(Buffer *buffer)
}
}
+/**
+ * \brief Handle buffers completion at the ImgU statistics output
+ * \param[in] buffer The completed buffer
+ *
+ * HACK: this is just a proof of concept.
+ * Inspect the statistics by printing a few of them out.
+ */
+void IPU3CameraData::statBufferReady(Buffer *buffer)
+{
+ struct ipu3_uapi_stats_3a *stats_3a =
+ static_cast<struct ipu3_uapi_stats_3a *>(buffer->planes()[0].mem());
+ struct ipu3_uapi_awb_raw_buffer *raw_awb = &stats_3a->awb_raw_buffer;
+ uint8_t *awb_meta = raw_awb->meta_data;
+
+ /*
+ * 'Inspect' the statistics and requeue the buffer directly to the
+ * stat node. Print a few values out for visual inspection!
+ */
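+ /*
+ * Printing stops at the first zero byte, assuming the ImgU left the
+ * remaining entries of the AWB raw buffer unpopulated.
+ */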
+ printf("\n");
+ for (unsigned int i = 0; i < IPU3_UAPI_AWB_MAX_BUFFER_SIZE; i++) {
+ if (!*awb_meta)
+ break;
+
+ printf("%u ", *awb_meta++);
+ }
+ printf("\n");
+
+ imgu_->stat_.dev->queueBuffer(buffer);
+}
+
+/**
+ * \brief Handle buffers completion at the ImgU parameters input
+ * \param[in] buffer The completed buffer
+ *
+ * The completed parameters buffer has been processed and can be returned
+ * to the vector of available buffers.
+ */
+void IPU3CameraData::paramsBufferReady(Buffer *buffer)
+{
+ imgu_->paramsBuffers.push_back(buffer);
+}
+
/**
* \brief Handle buffers completion at the CIO2 output
* \param[in] buffer The completed buffer
*
* Buffers completed from the CIO2 are immediately queued to the ImgU unit
* for further processing.
+ *
+ * HACK: apply a few parameters to the ImgU before queuing the image frame
+ * for processing. This is just a proof of concept.
*/
void IPU3CameraData::cio2BufferReady(Buffer *buffer)
{
+ /*
+ * If no buffer is available for parameter configuration, just
+ * queue the new image buffer to the ImgU.
+ *
+ * Otherwise prepare the parameter configuration buffer and provide
+ * it to the ImgU parameters node -before- the new image frame gets
+ * queued.
+ */
+ if (imgu_->paramsBuffers.empty()) {
+ imgu_->input_->queueBuffer(buffer);
+ return;
+ }
+
+ auto b = imgu_->paramsBuffers.begin();
+ struct ipu3_uapi_params *params = static_cast<struct ipu3_uapi_params *>
+ ((*b)->planes()[0].mem());
+ struct ipu3_uapi_acc_param *acc_param = &params->acc_param;
+ struct ipu3_uapi_csc_mat_config *csc_mat = &acc_param->csc;
+
+ /* Default CSC matrix values. Here just for reference. */
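+ /*
+ * These defaults appear to match the BT.601 RGB to YCbCr conversion
+ * matrix scaled by 2^14 (e.g. 0.299 * 16384 ~= 4898); this is an
+ * observation from the values, not taken from ImgU documentation.
+ */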
+ csc_mat->coeff_c11 = 4898;
+ csc_mat->coeff_c12 = 9617;
+ csc_mat->coeff_c13 = 1867;
+ csc_mat->coeff_b1 = 0;
+ csc_mat->coeff_c21 = -2410;
+ csc_mat->coeff_c22 = -4732;
+ csc_mat->coeff_c23 = 7143;
+ csc_mat->coeff_b2 = 0;
+ csc_mat->coeff_c31 = 10076;
+ csc_mat->coeff_c32 = -8437;
+ csc_mat->coeff_c33 = -1638;
+ csc_mat->coeff_b3 = 0;
+
+ /*
+ * Halve the default CSC matrix coefficients.
+ * The resulting image will have mangled colors (it is actually just
+ * 'darker' in local testing).
+ */
+ csc_mat->coeff_c11 = 2449;
+ csc_mat->coeff_c12 = 4809;
+ csc_mat->coeff_c13 = 932;
+ csc_mat->coeff_b1 = 0;
+ csc_mat->coeff_c21 = -1205;
+ csc_mat->coeff_c22 = -2312;
+ csc_mat->coeff_c23 = 3571;
+ csc_mat->coeff_b2 = 0;
+ csc_mat->coeff_c31 = 5038;
+ csc_mat->coeff_c32 = -4219;
+ csc_mat->coeff_c33 = -819;
+ csc_mat->coeff_b3 = 0;
+
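+ /*
+ * Tell the ImgU that the CSC section of this parameters buffer is
+ * valid and should be applied to the upcoming frame.
+ */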
+ params->use.acc_csc = 1;
+
+ Buffer *paramsBuffer = *b;
+ imgu_->paramsBuffers.erase(b);
+ int ret = imgu_->params_->queueBuffer(paramsBuffer);
+ if (ret) {
+ LOG(IPU3, Error) << "Failed to queue parameters";
+ return;
+ }
+
+ /*
+ * Once parameters have been applied, it's safe to provide the ImgU
+ * a new frame to process.
+ */
imgu_->input_->queueBuffer(buffer);
}
@@ -999,6 +1169,11 @@ int ImgUDevice::init(MediaDevice *media, unsigned int index)
stat_.name = "stat";
stat_.pool = &statPool_;
+ params_ = V4L2Device::fromEntityName(media, name_ + " parameters");
+ ret = params_->open();
+ if (ret)
+ return ret;
+
return 0;
}
@@ -1154,6 +1329,10 @@ void ImgUDevice::freeBuffers()
if (ret)
LOG(IPU3, Error) << "Failed to release ImgU viewfinder buffers";
+ ret = params_->releaseBuffers();
+ if (ret)
+ LOG(IPU3, Error) << "Failed to release ImgU params buffers";
+
ret = input_->releaseBuffers();
if (ret)
LOG(IPU3, Error) << "Failed to release ImgU input buffers";
@@ -1176,12 +1355,23 @@ int ImgUDevice::start()
return ret;
}
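+ /*
+ * Queue all statistics buffers to the stat node before starting it,
+ * so the ImgU can write statistics from the first frames onwards.
+ */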
+ for (Buffer &b : stat_.pool->buffers()) {
+ ret = stat_.dev->queueBuffer(&b);
+ if (ret < 0)
+ return ret;
+ }
ret = stat_.dev->streamOn();
if (ret) {
LOG(IPU3, Error) << "Failed to start ImgU stat";
return ret;
}
+ ret = params_->streamOn();
+ if (ret) {
+ LOG(IPU3, Error) << "Failed to start ImgU params";
+ return ret;
+ }
+
ret = input_->streamOn();
if (ret) {
LOG(IPU3, Error) << "Failed to start ImgU input";
@@ -1198,6 +1388,7 @@ int ImgUDevice::stop()
ret = output_.dev->streamOff();
ret |= viewfinder_.dev->streamOff();
ret |= stat_.dev->streamOff();
+ ret |= params_->streamOff();
ret |= input_->streamOff();
return ret;
@@ -1238,6 +1429,7 @@ int ImgUDevice::linkSetup(const std::string &source, unsigned int sourcePad,
int ImgUDevice::enableLinks(bool enable)
{
std::string viewfinderName = name_ + " viewfinder";
+ std::string paramsName = name_ + " parameters";
std::string outputName = name_ + " output";
std::string statName = name_ + " 3a stat";
std::string inputName = name_ + " input";
@@ -1247,6 +1439,10 @@ int ImgUDevice::enableLinks(bool enable)
if (ret)
return ret;
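+ /* The parameters video node feeds the ImgU parameters sink pad. */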
+ ret = linkSetup(paramsName, 0, name_, PAD_PARAMS, enable);
+ if (ret)
+ return ret;
+
ret = linkSetup(name_, PAD_OUTPUT, outputName, 0, enable);
if (ret)
return ret;