@@ -18,17 +18,6 @@ namespace libcamera {
namespace RPi {
-enum BufferMask {
- ID = 0x00ffff,
- STATS = 0x010000,
- EMBEDDED_DATA = 0x020000,
- BAYER_DATA = 0x040000,
- EXTERNAL_BUFFER = 0x100000,
-};
-
-/* Size of the LS grid allocation. */
-static constexpr unsigned int MaxLsGridSize = 32 << 10;
-
/*
* List of controls handled by the Raspberry Pi IPA
*
@@ -20,6 +20,7 @@
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/ipa_module_info.h>
#include <libcamera/ipa/raspberrypi.h>
+#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include <libcamera/request.h>
#include <libcamera/span.h>
@@ -62,7 +63,7 @@ constexpr double defaultMaxFrameDuration = 1e6 / 0.01;
LOG_DEFINE_CATEGORY(IPARPI)
-class IPARPi : public IPAInterface
+class IPARPi : public ipa::rpi::IPARPiInterface
{
public:
IPARPi()
@@ -75,21 +76,24 @@ public:
~IPARPi()
{
if (lsTable_)
- munmap(lsTable_, RPi::MaxLsGridSize);
+ munmap(lsTable_, ipa::rpi::MaxLsGridSize);
}
int init(const IPASettings &settings) override;
- int start(const IPAOperationData &data, IPAOperationData *result) override;
+ void start(const ipa::rpi::StartControls &data,
+ ipa::rpi::StartControls *result) override;
void stop() override {}
void configure(const CameraSensorInfo &sensorInfo,
const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &ipaConfig,
- IPAOperationData *response) override;
+ const std::map<unsigned int, ControlInfoMap> &entityControls,
+ const ipa::rpi::ConfigInput &data,
+ ipa::rpi::ConfigOutput *response, int32_t *ret) override;
void mapBuffers(const std::vector<IPABuffer> &buffers) override;
void unmapBuffers(const std::vector<unsigned int> &ids) override;
- void processEvent(const IPAOperationData &event) override;
+ void signalStatReady(const uint32_t bufferId) override;
+ void signalQueueRequest(const ControlList &controls) override;
+ void signalIspPrepare(const ipa::rpi::ISPConfig &data) override;
private:
void setMode(const CameraSensorInfo &sensorInfo);
@@ -164,15 +168,15 @@ int IPARPi::init(const IPASettings &settings)
return 0;
}
-int IPARPi::start(const IPAOperationData &data, IPAOperationData *result)
+void IPARPi::start(const ipa::rpi::StartControls &data,
+ ipa::rpi::StartControls *result)
{
RPiController::Metadata metadata;
ASSERT(result);
- result->operation = 0;
- if (data.operation & RPi::IPA_CONFIG_STARTUP_CTRLS) {
+ if (!data.controls.empty()) {
/* We have been given some controls to action before start. */
- queueRequest(data.controls[0]);
+ queueRequest(data.controls);
}
controller_.SwitchMode(mode_, &metadata);
@@ -187,8 +191,7 @@ int IPARPi::start(const IPAOperationData &data, IPAOperationData *result)
if (agcStatus.shutter_time != 0.0 && agcStatus.analogue_gain != 0.0) {
ControlList ctrls(sensorCtrls_);
applyAGC(&agcStatus, ctrls);
- result->controls.emplace_back(ctrls);
- result->operation |= RPi::IPA_RESULT_SENSOR_CTRLS;
+ result->controls = std::move(ctrls);
}
/*
@@ -235,12 +238,9 @@ int IPARPi::start(const IPAOperationData &data, IPAOperationData *result)
mistrustCount_ = helper_->MistrustFramesModeSwitch();
}
- result->data.push_back(dropFrame);
- result->operation |= RPi::IPA_RESULT_DROP_FRAMES;
+ result->dropFrameCount = dropFrame;
firstStart_ = false;
-
- return 0;
}
void IPARPi::setMode(const CameraSensorInfo &sensorInfo)
@@ -290,30 +290,30 @@ void IPARPi::setMode(const CameraSensorInfo &sensorInfo)
void IPARPi::configure(const CameraSensorInfo &sensorInfo,
[[maybe_unused]] const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &ipaConfig,
- IPAOperationData *result)
+ const std::map<unsigned int, ControlInfoMap> &entityControls,
+ const ipa::rpi::ConfigInput &ipaConfig,
+ ipa::rpi::ConfigOutput *result, int32_t *ret)
{
if (entityControls.size() != 2) {
LOG(IPARPI, Error) << "No ISP or sensor controls found.";
- result->operation = RPi::IPA_RESULT_CONFIG_FAILED;
+ *ret = -1;
return;
}
- result->operation = 0;
+ result->params = 0;
sensorCtrls_ = entityControls.at(0);
ispCtrls_ = entityControls.at(1);
if (!validateSensorControls()) {
LOG(IPARPI, Error) << "Sensor control validation failed.";
- result->operation = RPi::IPA_RESULT_CONFIG_FAILED;
+ *ret = -1;
return;
}
if (!validateIspControls()) {
LOG(IPARPI, Error) << "ISP control validation failed.";
- result->operation = RPi::IPA_RESULT_CONFIG_FAILED;
+ *ret = -1;
return;
}
@@ -332,7 +332,7 @@ void IPARPi::configure(const CameraSensorInfo &sensorInfo,
if (!helper_) {
LOG(IPARPI, Error) << "Could not create camera helper for "
<< cameraName;
- result->operation = RPi::IPA_RESULT_CONFIG_FAILED;
+ *ret = -1;
return;
}
@@ -344,35 +344,30 @@ void IPARPi::configure(const CameraSensorInfo &sensorInfo,
helper_->GetDelays(exposureDelay, gainDelay);
sensorMetadata = helper_->SensorEmbeddedDataPresent();
- result->data.push_back(gainDelay);
- result->data.push_back(exposureDelay); /* For EXPOSURE ctrl */
- result->data.push_back(exposureDelay); /* For VBLANK ctrl */
- result->data.push_back(sensorMetadata);
-
- result->operation |= RPi::IPA_RESULT_SENSOR_PARAMS;
+ result->params |= ipa::rpi::ConfigStaggeredWrite;
+ result->sensorConfig.gainDelay = gainDelay;
+ result->sensorConfig.exposureDelay = exposureDelay; /* For EXPOSURE ctrl */
+ result->sensorConfig.vblank = exposureDelay; /* For VBLANK ctrl */
+ result->sensorConfig.sensorMetadata = sensorMetadata;
}
/* Re-assemble camera mode using the sensor info. */
setMode(sensorInfo);
- /*
- * The ipaConfig.data always gives us the user transform first. Note that
- * this will always make the LS table pointer (if present) element 1.
- */
- mode_.transform = static_cast<libcamera::Transform>(ipaConfig.data[0]);
+ mode_.transform = static_cast<libcamera::Transform>(ipaConfig.transform);
/* Store the lens shading table pointer and handle if available. */
- if (ipaConfig.operation & RPi::IPA_CONFIG_LS_TABLE) {
+ if (ipaConfig.lsTableHandle.isValid()) {
/* Remove any previous table, if there was one. */
if (lsTable_) {
- munmap(lsTable_, RPi::MaxLsGridSize);
+ munmap(lsTable_, ipa::rpi::MaxLsGridSize);
lsTable_ = nullptr;
}
- /* Map the LS table buffer into user space (now element 1). */
- lsTableHandle_ = FileDescriptor(ipaConfig.data[1]);
+ /* Map the LS table buffer into user space. */
+ lsTableHandle_ = std::move(ipaConfig.lsTableHandle);
if (lsTableHandle_.isValid()) {
- lsTable_ = mmap(nullptr, RPi::MaxLsGridSize, PROT_READ | PROT_WRITE,
+ lsTable_ = mmap(nullptr, ipa::rpi::MaxLsGridSize, PROT_READ | PROT_WRITE,
MAP_SHARED, lsTableHandle_.fd(), 0);
if (lsTable_ == MAP_FAILED) {
@@ -401,11 +396,12 @@ void IPARPi::configure(const CameraSensorInfo &sensorInfo,
agcStatus.analogue_gain = DefaultAnalogueGain;
applyAGC(&agcStatus, ctrls);
- result->controls.emplace_back(ctrls);
- result->operation |= RPi::IPA_RESULT_SENSOR_CTRLS;
+ result->controls = std::move(ctrls);
}
lastMode_ = mode_;
+
+ *ret = 0;
}
void IPARPi::mapBuffers(const std::vector<IPABuffer> &buffers)
@@ -427,56 +423,35 @@ void IPARPi::unmapBuffers(const std::vector<unsigned int> &ids)
}
}
-void IPARPi::processEvent(const IPAOperationData &event)
+void IPARPi::signalStatReady(uint32_t bufferId)
{
- switch (event.operation) {
- case RPi::IPA_EVENT_SIGNAL_STAT_READY: {
- unsigned int bufferId = event.data[0];
-
- if (++checkCount_ != frameCount_) /* assert here? */
- LOG(IPARPI, Error) << "WARNING: Prepare/Process mismatch!!!";
- if (frameCount_ > mistrustCount_)
- processStats(bufferId);
-
- reportMetadata();
-
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_STATS_METADATA_COMPLETE;
- op.data = { bufferId & RPi::BufferMask::ID };
- op.controls = { libcameraMetadata_ };
- queueFrameAction.emit(0, op);
- break;
- }
+ if (++checkCount_ != frameCount_) /* assert here? */
+ LOG(IPARPI, Error) << "WARNING: Prepare/Process mismatch!!!";
+ if (frameCount_ > mistrustCount_)
+ processStats(bufferId);
- case RPi::IPA_EVENT_SIGNAL_ISP_PREPARE: {
- unsigned int embeddedbufferId = event.data[0];
- unsigned int bayerbufferId = event.data[1];
+ reportMetadata();
- /*
- * At start-up, or after a mode-switch, we may want to
- * avoid running the control algos for a few frames in case
- * they are "unreliable".
- */
- prepareISP(embeddedbufferId);
- frameCount_++;
-
- /* Ready to push the input buffer into the ISP. */
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_RUN_ISP;
- op.data = { bayerbufferId & RPi::BufferMask::ID };
- queueFrameAction.emit(0, op);
- break;
- }
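+ /* Return only the buffer index to the pipeline handler; the type bits are masked off. */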
+ statsMetadataComplete.emit(bufferId & ipa::rpi::MaskID, libcameraMetadata_);
+}
- case RPi::IPA_EVENT_QUEUE_REQUEST: {
- queueRequest(event.controls[0]);
- break;
- }
+void IPARPi::signalQueueRequest(const ControlList &controls)
+{
+ queueRequest(controls);
+}
- default:
- LOG(IPARPI, Error) << "Unknown event " << event.operation;
- break;
- }
+void IPARPi::signalIspPrepare(const ipa::rpi::ISPConfig &data)
+{
+ /*
+ * At start-up, or after a mode-switch, we may want to
+ * avoid running the control algos for a few frames in case
+ * they are "unreliable".
+ */
+ prepareISP(data.embeddedbufferId);
+ frameCount_++;
+
+ /* Ready to push the input buffer into the ISP. */
+ runIsp.emit(data.bayerbufferId & ipa::rpi::MaskID);
}
void IPARPi::reportMetadata()
@@ -931,10 +906,7 @@ void IPARPi::queueRequest(const ControlList &controls)
void IPARPi::returnEmbeddedBuffer(unsigned int bufferId)
{
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_EMBEDDED_COMPLETE;
- op.data = { bufferId & RPi::BufferMask::ID };
- queueFrameAction.emit(0, op);
+ embeddedComplete.emit(bufferId & ipa::rpi::MaskID);
}
void IPARPi::prepareISP(unsigned int bufferId)
@@ -995,12 +967,8 @@ void IPARPi::prepareISP(unsigned int bufferId)
if (dpcStatus)
applyDPC(dpcStatus, ctrls);
- if (!ctrls.empty()) {
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_V4L2_SET_ISP;
- op.controls.push_back(ctrls);
- queueFrameAction.emit(0, op);
- }
+ if (!ctrls.empty())
+ setIsp.emit(ctrls);
}
}
@@ -1057,10 +1025,7 @@ void IPARPi::processStats(unsigned int bufferId)
ControlList ctrls(sensorCtrls_);
applyAGC(&agcStatus, ctrls);
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_SET_DELAYED_CTRLS;
- op.controls.emplace_back(ctrls);
- queueFrameAction.emit(0, op);
+ setDelayedControls.emit(ctrls);
}
}
@@ -1299,13 +1264,14 @@ void IPARPi::applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls)
.grid_width = w,
.grid_stride = w,
.grid_height = h,
- .dmabuf = lsTableHandle_.fd(),
+ /* .dmabuf will be filled in by pipeline handler. */
+ .dmabuf = 0,
.ref_transform = 0,
.corner_sampled = 1,
.gain_format = GAIN_FORMAT_U4P10
};
- if (!lsTable_ || w * h * 4 * sizeof(uint16_t) > RPi::MaxLsGridSize) {
+ if (!lsTable_ || w * h * 4 * sizeof(uint16_t) > ipa::rpi::MaxLsGridSize) {
LOG(IPARPI, Error) << "Do not have a correctly allocate lens shading table!";
return;
}
@@ -1376,9 +1342,9 @@ const struct IPAModuleInfo ipaModuleInfo = {
"raspberrypi",
};
-struct ipa_context *ipaCreate()
+IPAInterface *ipaCreate()
{
- return new IPAInterfaceWrapper(std::make_unique<IPARPi>());
+ return new IPARPi();
}
} /* extern "C" */
@@ -18,10 +18,13 @@
#include <libcamera/file_descriptor.h>
#include <libcamera/formats.h>
#include <libcamera/ipa/raspberrypi.h>
+#include <libcamera/ipa/raspberrypi_ipa_interface.h>
+#include <libcamera/ipa/raspberrypi_ipa_proxy.h>
#include <libcamera/logging.h>
#include <libcamera/property_ids.h>
#include <libcamera/request.h>
+#include <linux/bcm2835-isp.h>
#include <linux/videodev2.h>
#include "libcamera/internal/bayer_format.h"
@@ -146,7 +149,11 @@ public:
int loadIPA();
int configureIPA(const CameraConfiguration *config);
- void queueFrameAction(unsigned int frame, const IPAOperationData &action);
+ void statsMetadataComplete(uint32_t bufferId, const ControlList &controls);
+ void runIsp(uint32_t bufferId);
+ void embeddedComplete(uint32_t bufferId);
+ void setIsp(const ControlList &controls);
+ void setDelayedControls(const ControlList &controls);
/* bufferComplete signal handlers. */
void unicamBufferDequeue(FrameBuffer *buffer);
@@ -159,6 +166,8 @@ public:
void handleState();
void applyScalerCrop(const ControlList &controls);
+ std::unique_ptr<ipa::rpi::IPAProxyRPi> ipa_;
+
std::unique_ptr<CameraSensor> sensor_;
/* Array of Unicam and ISP device streams and associated buffers/streams. */
RPi::Device<Unicam, 2> unicam_;
@@ -751,7 +760,7 @@ int PipelineHandlerRPi::exportFrameBuffers([[maybe_unused]] Camera *camera, Stre
return ret;
}
-int PipelineHandlerRPi::start(Camera *camera, [[maybe_unused]] ControlList *controls)
+int PipelineHandlerRPi::start(Camera *camera, ControlList *controls)
{
RPiCameraData *data = cameraData(camera);
int ret;
@@ -769,30 +778,20 @@ int PipelineHandlerRPi::start(Camera *camera, [[maybe_unused]] ControlList *cont
data->applyScalerCrop(*controls);
/* Start the IPA. */
- IPAOperationData ipaData = {};
- IPAOperationData result = {};
- if (controls) {
- ipaData.operation = RPi::IPA_CONFIG_STARTUP_CTRLS;
- ipaData.controls.emplace_back(*controls);
- }
- ret = data->ipa_->start(ipaData, &result);
- if (ret) {
- LOG(RPI, Error)
- << "Failed to start IPA for " << camera->id();
- stop(camera);
- return ret;
- }
+ ipa::rpi::StartControls ipaData;
+ ipa::rpi::StartControls result;
+ if (controls)
+ ipaData.controls = *controls;
+ data->ipa_->start(ipaData, &result);
/* Apply any gain/exposure settings that the IPA may have passed back. */
- if (result.operation & RPi::IPA_RESULT_SENSOR_CTRLS) {
- ControlList &ctrls = result.controls[0];
+ if (!result.controls.empty()) {
+ ControlList &ctrls = result.controls;
data->unicam_[Unicam::Image].dev()->setControls(&ctrls);
}
- if (result.operation & RPi::IPA_RESULT_DROP_FRAMES) {
- /* Configure the number of dropped frames required on startup. */
- data->dropFrameCount_ = result.data[0];
- }
+ /* Configure the number of dropped frames required on startup. */
+ data->dropFrameCount_ = result.dropFrameCount;
/* We need to set the dropFrameCount_ before queueing buffers. */
ret = queueAllBuffers(camera);
@@ -1115,8 +1114,8 @@ int PipelineHandlerRPi::prepareBuffers(Camera *camera)
* Pass the stats and embedded data buffers to the IPA. No other
* buffers need to be passed.
*/
- mapBuffers(camera, data->isp_[Isp::Stats].getBuffers(), RPi::BufferMask::STATS);
- mapBuffers(camera, data->unicam_[Unicam::Embedded].getBuffers(), RPi::BufferMask::EMBEDDED_DATA);
+ mapBuffers(camera, data->isp_[Isp::Stats].getBuffers(), ipa::rpi::MaskStats);
+ mapBuffers(camera, data->unicam_[Unicam::Embedded].getBuffers(), ipa::rpi::MaskEmbeddedData);
return 0;
}
@@ -1133,8 +1132,8 @@ void PipelineHandlerRPi::mapBuffers(Camera *camera, const RPi::BufferMap &buffer
* handler and the IPA.
*/
for (auto const &it : buffers) {
- ipaBuffers.push_back({ .id = mask | it.first,
- .planes = it.second->planes() });
+ ipaBuffers.push_back(IPABuffer(mask | it.first,
+ it.second->planes()));
data->ipaBuffers_.insert(mask | it.first);
}
@@ -1165,15 +1164,18 @@ void RPiCameraData::frameStarted(uint32_t sequence)
int RPiCameraData::loadIPA()
{
- ipa_ = IPAManager::createIPA(pipe_, 1, 1);
+ ipa_ = IPAManager::createIPA<ipa::rpi::IPAProxyRPi>(pipe_, 1, 1);
+
if (!ipa_)
return -ENOENT;
- ipa_->queueFrameAction.connect(this, &RPiCameraData::queueFrameAction);
+ ipa_->statsMetadataComplete.connect(this, &RPiCameraData::statsMetadataComplete);
+ ipa_->runIsp.connect(this, &RPiCameraData::runIsp);
+ ipa_->embeddedComplete.connect(this, &RPiCameraData::embeddedComplete);
+ ipa_->setIsp.connect(this, &RPiCameraData::setIsp);
+ ipa_->setDelayedControls.connect(this, &RPiCameraData::setDelayedControls);
- IPASettings settings{
- .configurationFile = ipa_->configurationFile(sensor_->model() + ".json")
- };
+ IPASettings settings(ipa_->configurationFile(sensor_->model() + ".json"));
return ipa_->init(settings);
}
@@ -1185,8 +1187,8 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config)
static_cast<const RPiCameraConfiguration *>(config);
std::map<unsigned int, IPAStream> streamConfig;
- std::map<unsigned int, const ControlInfoMap &> entityControls;
- IPAOperationData ipaConfig = {};
+ std::map<unsigned int, ControlInfoMap> entityControls;
+ ipa::rpi::ConfigInput ipaConfig;
/* Get the device format to pass to the IPA. */
V4L2DeviceFormat sensorFormat;
@@ -1195,10 +1197,9 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config)
unsigned int i = 0;
for (auto const &stream : isp_) {
if (stream.isExternal()) {
- streamConfig[i++] = {
- .pixelFormat = stream.configuration().pixelFormat,
- .size = stream.configuration().size
- };
+ streamConfig[i++] = IPAStream(
+ stream.configuration().pixelFormat,
+ stream.configuration().size);
}
}
@@ -1206,17 +1207,20 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config)
entityControls.emplace(1, isp_[Isp::Input].dev()->controls());
/* Always send the user transform to the IPA. */
- ipaConfig.data = { static_cast<unsigned int>(config->transform) };
+ ipaConfig.transform = static_cast<unsigned int>(config->transform);
/* Allocate the lens shading table via dmaHeap and pass to the IPA. */
if (!lsTable_.isValid()) {
- lsTable_ = dmaHeap_.alloc("ls_grid", RPi::MaxLsGridSize);
+ lsTable_ = dmaHeap_.alloc("ls_grid", ipa::rpi::MaxLsGridSize);
if (!lsTable_.isValid())
return -ENOMEM;
/* Allow the IPA to mmap the LS table via the file descriptor. */
- ipaConfig.operation = RPi::IPA_CONFIG_LS_TABLE;
- ipaConfig.data.push_back(static_cast<unsigned int>(lsTable_.fd()));
+ /*
+ * \todo Investigate if mapping the lens shading table buffer
+ * could be handled with mapBuffers().
+ */
+ ipaConfig.lsTableHandle = lsTable_;
}
/* We store the CameraSensorInfo for digital zoom calculations. */
@@ -1227,35 +1231,34 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config)
}
/* Ready the IPA - it must know about the sensor resolution. */
- IPAOperationData result = {};
+ ipa::rpi::ConfigOutput result;
ipa_->configure(sensorInfo_, streamConfig, entityControls, ipaConfig,
- &result);
+ &result, &ret);
- if (result.operation & RPi::IPA_RESULT_CONFIG_FAILED) {
+ if (ret < 0) {
LOG(RPI, Error) << "IPA configuration failed!";
return -EPIPE;
}
- unsigned int resultIdx = 0;
- if (result.operation & RPi::IPA_RESULT_SENSOR_PARAMS) {
+ if (result.params & ipa::rpi::ConfigStaggeredWrite) {
/*
* Setup our delayed control writer with the sensor default
* gain and exposure delays.
*/
std::unordered_map<uint32_t, unsigned int> delays = {
- { V4L2_CID_ANALOGUE_GAIN, result.data[resultIdx++] },
- { V4L2_CID_EXPOSURE, result.data[resultIdx++] },
- { V4L2_CID_VBLANK, result.data[resultIdx++] }
+ { V4L2_CID_ANALOGUE_GAIN, result.sensorConfig.gainDelay },
+ { V4L2_CID_EXPOSURE, result.sensorConfig.exposureDelay },
+ { V4L2_CID_VBLANK, result.sensorConfig.vblank }
};
delayedCtrls_ = std::make_unique<DelayedControls>(unicam_[Unicam::Image].dev(), delays);
- sensorMetadata_ = result.data[resultIdx++];
+ sensorMetadata_ = result.sensorConfig.sensorMetadata;
}
- if (result.operation & RPi::IPA_RESULT_SENSOR_CTRLS) {
- ControlList &ctrls = result.controls[0];
+ if (!result.controls.empty()) {
+ ControlList &ctrls = result.controls;
unicam_[Unicam::Image].dev()->setControls(&ctrls);
}
@@ -1275,90 +1278,86 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config)
return 0;
}
-void RPiCameraData::queueFrameAction([[maybe_unused]] unsigned int frame,
- const IPAOperationData &action)
+void RPiCameraData::statsMetadataComplete(uint32_t bufferId, const ControlList &controls)
{
+ if (state_ == State::Stopped)
+ handleState();
+
+ FrameBuffer *buffer = isp_[Isp::Stats].getBuffers().at(bufferId);
+
+ handleStreamBuffer(buffer, &isp_[Isp::Stats]);
+
+ /* Fill the Request metadata buffer with what the IPA has provided */
+ Request *request = requestQueue_.front();
+ request->metadata() = std::move(controls);
+
/*
- * The following actions can be handled when the pipeline handler is in
- * a stopped state.
+ * Also update the ScalerCrop in the metadata with what we actually
+ * used. But we must first rescale that from ISP (camera mode) pixels
+ * back into sensor native pixels.
+ *
+ * Sending this information on every frame may be helpful.
*/
- switch (action.operation) {
- case RPi::IPA_ACTION_SET_DELAYED_CTRLS: {
- const ControlList &controls = action.controls[0];
- if (!delayedCtrls_->push(controls))
- LOG(RPI, Error) << "Failed to set delayed controls";
- goto done;
+ if (updateScalerCrop_) {
+ updateScalerCrop_ = false;
+ scalerCrop_ = ispCrop_.scaledBy(sensorInfo_.analogCrop.size(),
+ sensorInfo_.outputSize);
+ scalerCrop_.translateBy(sensorInfo_.analogCrop.topLeft());
}
+ request->metadata().set(controls::ScalerCrop, scalerCrop_);
- case RPi::IPA_ACTION_V4L2_SET_ISP: {
- ControlList controls = action.controls[0];
- isp_[Isp::Input].dev()->setControls(&controls);
- goto done;
- }
- }
+ state_ = State::IpaComplete;
- if (state_ == State::Stopped)
- goto done;
+ handleState();
+}
- /*
- * The following actions must not be handled when the pipeline handler
- * is in a stopped state.
- */
- switch (action.operation) {
- case RPi::IPA_ACTION_STATS_METADATA_COMPLETE: {
- unsigned int bufferId = action.data[0];
- FrameBuffer *buffer = isp_[Isp::Stats].getBuffers().at(bufferId);
+void RPiCameraData::runIsp(uint32_t bufferId)
+{
+ if (state_ == State::Stopped)
+ handleState();
- handleStreamBuffer(buffer, &isp_[Isp::Stats]);
+ FrameBuffer *buffer = unicam_[Unicam::Image].getBuffers().at(bufferId);
- /* Fill the Request metadata buffer with what the IPA has provided */
- Request *request = requestQueue_.front();
- request->metadata() = std::move(action.controls[0]);
+ LOG(RPI, Debug) << "Input re-queue to ISP, buffer id " << bufferId
+ << ", timestamp: " << buffer->metadata().timestamp;
- /*
- * Also update the ScalerCrop in the metadata with what we actually
- * used. But we must first rescale that from ISP (camera mode) pixels
- * back into sensor native pixels.
- *
- * Sending this information on every frame may be helpful.
- */
- if (updateScalerCrop_) {
- updateScalerCrop_ = false;
- scalerCrop_ = ispCrop_.scaledBy(sensorInfo_.analogCrop.size(),
- sensorInfo_.outputSize);
- scalerCrop_.translateBy(sensorInfo_.analogCrop.topLeft());
- }
- request->metadata().set(controls::ScalerCrop, scalerCrop_);
+ isp_[Isp::Input].queueBuffer(buffer);
+ ispOutputCount_ = 0;
+ handleState();
+}
- state_ = State::IpaComplete;
- break;
- }
+void RPiCameraData::embeddedComplete(uint32_t bufferId)
+{
+ if (state_ == State::Stopped)
+ handleState();
- case RPi::IPA_ACTION_EMBEDDED_COMPLETE: {
- unsigned int bufferId = action.data[0];
- FrameBuffer *buffer = unicam_[Unicam::Embedded].getBuffers().at(bufferId);
- handleStreamBuffer(buffer, &unicam_[Unicam::Embedded]);
- break;
- }
+ FrameBuffer *buffer = unicam_[Unicam::Embedded].getBuffers().at(bufferId);
+ handleStreamBuffer(buffer, &unicam_[Unicam::Embedded]);
+ handleState();
+}
- case RPi::IPA_ACTION_RUN_ISP: {
- unsigned int bufferId = action.data[0];
- FrameBuffer *buffer = unicam_[Unicam::Image].getBuffers().at(bufferId);
+void RPiCameraData::setIsp(const ControlList &controls)
+{
+ ControlList ctrls = controls;
- LOG(RPI, Debug) << "Input re-queue to ISP, buffer id " << bufferId
- << ", timestamp: " << buffer->metadata().timestamp;
+ Span<const uint8_t> s =
+ ctrls.get(V4L2_CID_USER_BCM2835_ISP_LENS_SHADING).data();
+ bcm2835_isp_lens_shading ls =
+ *reinterpret_cast<const bcm2835_isp_lens_shading *>(s.data());
+ ls.dmabuf = lsTable_.fd();
- isp_[Isp::Input].queueBuffer(buffer);
- ispOutputCount_ = 0;
- break;
- }
+ ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&ls),
+ sizeof(ls) });
+ ctrls.set(V4L2_CID_USER_BCM2835_ISP_LENS_SHADING, c);
- default:
- LOG(RPI, Error) << "Unknown action " << action.operation;
- break;
- }
+ isp_[Isp::Input].dev()->setControls(&ctrls);
+ handleState();
+}
-done:
+void RPiCameraData::setDelayedControls(const ControlList &controls)
+{
+ if (!delayedCtrls_->push(controls))
+ LOG(RPI, Error) << "V4L2 staggered set failed";
handleState();
}
@@ -1456,10 +1455,7 @@ void RPiCameraData::ispOutputDequeue(FrameBuffer *buffer)
* application until after the IPA signals so.
*/
if (stream == &isp_[Isp::Stats]) {
- IPAOperationData op;
- op.operation = RPi::IPA_EVENT_SIGNAL_STAT_READY;
- op.data = { RPi::BufferMask::STATS | static_cast<unsigned int>(index) };
- ipa_->processEvent(op);
+ ipa_->signalStatReady(ipa::rpi::MaskStats | static_cast<unsigned int>(index));
} else {
/* Any other ISP output can be handed back to the application now. */
handleStreamBuffer(buffer, stream);
@@ -1563,7 +1559,7 @@ void RPiCameraData::handleExternalBuffer(FrameBuffer *buffer, RPi::Stream *strea
{
unsigned int id = stream->getBufferId(buffer);
- if (!(id & RPi::BufferMask::EXTERNAL_BUFFER))
+ if (!(id & ipa::rpi::MaskExternalBuffer))
return;
/* Stop the Stream object from tracking the buffer. */
@@ -1663,7 +1659,6 @@ void RPiCameraData::applyScalerCrop(const ControlList &controls)
void RPiCameraData::tryRunPipeline()
{
FrameBuffer *bayerBuffer, *embeddedBuffer;
- IPAOperationData op;
/* If any of our request or buffer queues are empty, we cannot proceed. */
if (state_ != State::Idle || requestQueue_.empty() ||
@@ -1684,9 +1679,7 @@ void RPiCameraData::tryRunPipeline()
* queue the ISP output buffer listed in the request to start the HW
* pipeline.
*/
- op.operation = RPi::IPA_EVENT_QUEUE_REQUEST;
- op.controls = { request->controls() };
- ipa_->processEvent(op);
+ ipa_->signalQueueRequest(request->controls());
/* Set our state to say the pipeline is active. */
state_ = State::Busy;
@@ -1694,14 +1687,14 @@ void RPiCameraData::tryRunPipeline()
unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerBuffer);
unsigned int embeddedId = unicam_[Unicam::Embedded].getBufferId(embeddedBuffer);
- LOG(RPI, Debug) << "Signalling RPi::IPA_EVENT_SIGNAL_ISP_PREPARE:"
+ LOG(RPI, Debug) << "Signalling signalIspPrepare:"
<< " Bayer buffer id: " << bayerId
<< " Embedded buffer id: " << embeddedId;
- op.operation = RPi::IPA_EVENT_SIGNAL_ISP_PREPARE;
- op.data = { RPi::BufferMask::EMBEDDED_DATA | embeddedId,
- RPi::BufferMask::BAYER_DATA | bayerId };
- ipa_->processEvent(op);
+ ipa::rpi::ISPConfig ispPrepare;
+ ispPrepare.embeddedbufferId = ipa::rpi::MaskEmbeddedData | embeddedId;
+ ispPrepare.bayerbufferId = ipa::rpi::MaskBayerData | bayerId;
+ ipa_->signalIspPrepare(ispPrepare);
}
bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *&embeddedBuffer)
@@ -6,6 +6,8 @@
*/
#include "rpi_stream.h"
+#include <libcamera/ipa/raspberrypi_ipa_interface.h>
+
#include "libcamera/internal/log.h"
namespace libcamera {
@@ -70,7 +72,7 @@ int Stream::getBufferId(FrameBuffer *buffer) const
void Stream::setExternalBuffer(FrameBuffer *buffer)
{
- bufferMap_.emplace(RPi::BufferMask::EXTERNAL_BUFFER | id_.get(), buffer);
+ bufferMap_.emplace(ipa::rpi::MaskExternalBuffer | id_.get(), buffer);
}
void Stream::removeExternalBuffer(FrameBuffer *buffer)
@@ -78,7 +80,7 @@ void Stream::removeExternalBuffer(FrameBuffer *buffer)
int id = getBufferId(buffer);
/* Ensure we have this buffer in the stream, and it is marked external. */
- ASSERT(id != -1 && (id & RPi::BufferMask::EXTERNAL_BUFFER));
+ ASSERT(id != -1 && (id & ipa::rpi::MaskExternalBuffer));
bufferMap_.erase(id);
}
@@ -13,6 +13,7 @@
#include <vector>
#include <libcamera/ipa/raspberrypi.h>
+#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include <libcamera/stream.h>
#include "libcamera/internal/v4l2_videodevice.h"
@@ -31,13 +32,13 @@ class Stream : public libcamera::Stream
{
public:
Stream()
- : id_(RPi::BufferMask::ID)
+ : id_(ipa::rpi::MaskID)
{
}
Stream(const char *name, MediaEntity *dev, bool importOnly = false)
: external_(false), importOnly_(importOnly), name_(name),
- dev_(std::make_unique<V4L2VideoDevice>(dev)), id_(RPi::BufferMask::ID)
+ dev_(std::make_unique<V4L2VideoDevice>(dev)), id_(ipa::rpi::MaskID)
{
}