@@ -8,7 +8,7 @@ module ipa.RPi;
import "include/libcamera/ipa/core.mojom";
-/* Size of the LS grid allocation. */
+/* Size of the LS grid allocation on VC4. */
const uint32 MaxLsGridSize = 0x8000;
struct SensorConfig {
@@ -19,64 +19,123 @@ struct SensorConfig {
uint32 sensorMetadata;
};
-struct IPAInitResult {
+struct InitParams {
+ bool lensPresent;
+};
+
+struct InitResult {
SensorConfig sensorConfig;
libcamera.ControlInfoMap controlInfo;
};
-struct ISPConfig {
- uint32 embeddedBufferId;
- uint32 bayerBufferId;
- bool embeddedBufferPresent;
- libcamera.ControlList controls;
- uint32 ipaContext;
- uint32 delayContext;
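+/*
+ * Buffer IDs for a frame. The IDs are opaque handles; a value of zero
+ * indicates that the corresponding buffer is not present.
+ */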
+struct BufferIds {
+ uint32 bayer;
+ uint32 embedded;
+ uint32 stats;
};
-struct IPAConfig {
+struct ConfigParams {
uint32 transform;
- libcamera.SharedFD lsTableHandle;
libcamera.ControlInfoMap sensorControls;
libcamera.ControlInfoMap ispControls;
libcamera.ControlInfoMap lensControls;
+ /* VC4 specific */
+ libcamera.SharedFD lsTableHandle;
};
-struct IPAConfigResult {
- float modeSensitivity;
- libcamera.ControlInfoMap controlInfo;
+struct ConfigResult {
+ float modeSensitivity;
+ libcamera.ControlInfoMap controlInfo;
+ libcamera.ControlList controls;
};
-struct StartConfig {
+struct StartResult {
libcamera.ControlList controls;
int32 dropFrameCount;
};
+struct PrepareParams {
+ BufferIds buffers;
+ libcamera.ControlList sensorControls;
+ libcamera.ControlList requestControls;
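+	/* Context slot where the algorithm results for this frame are stored. */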
+ uint32 ipaContext;
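+	/* Context that was active when the sensor settings for this frame were requested. */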
+ uint32 delayContext;
+};
+
+struct ProcessParams {
+ BufferIds buffers;
+ uint32 ipaContext;
+};
+
interface IPARPiInterface {
- init(libcamera.IPASettings settings, bool lensPresent)
- => (int32 ret, IPAInitResult result);
- start(libcamera.ControlList controls) => (StartConfig startConfig);
+ /**
+ * \fn init()
+ * \brief Initialise the IPA
+ * \param[in] settings Camera sensor information and configuration file
+ * \param[in] params Platform specific initialisation parameters
+ * \param[out] ret 0 on success or a negative error code otherwise
+ * \param[out] result Static sensor configuration and controls available
+ *
+ * This function initialises the IPA for a particular sensor from the
+ * pipeline handler.
+ *
+ * The \a settings conveys information about the camera sensor and
+ * configuration file requested by the pipeline handler.
+ *
+ * The \a result parameter returns the sensor delay for the given camera
+ * as well as a ControlInfoMap of available controls that can be handled
+ * by the IPA.
+ */
+ init(libcamera.IPASettings settings, InitParams params)
+ => (int32 ret, InitResult result);
+
+ /**
+ * \fn start()
+ * \brief Start the IPA
+ * \param[in] controls List of controls to handle
+ * \param[out] result Controls to apply and number of dropped frames
+ *
+ * This function sets the IPA to a started state.
+ *
+ * The \a controls parameter provides a list of controls to handle
+ * immediately. The actual controls to apply on the sensor and ISP in
+ * the pipeline handler are returned in \a result.
+ *
+ * The number of convergence frames to be dropped is also returned in
+ * \a result.
+ */
+ start(libcamera.ControlList controls) => (StartResult result);
+
+ /**
+ * \fn stop()
+ * \brief Stop the IPA
+ *
+ * This function sets the IPA to a stopped state.
+ */
stop();
/**
* \fn configure()
- * \brief Configure the IPA stream and sensor settings
- * \param[in] sensorInfo Camera sensor information
- * \param[in] ipaConfig Pipeline-handler-specific configuration data
- * \param[out] controls Controls to apply by the pipeline entity
- * \param[out] result Other results that the pipeline handler may require
+ * \brief Configure the IPA
+ * \param[in] sensorInfo Sensor mode configuration
+ * \param[in] params Platform configuration parameters
+ * \param[out] ret 0 on success or a negative error code otherwise
+ * \param[out] result Results of the configuration operation
*
- * This function shall be called when the camera is configured to inform
- * the IPA of the camera's streams and the sensor settings.
+ * This function configures the IPA for a particular camera
+ * configuration.
*
- * The \a sensorInfo conveys information about the camera sensor settings that
- * the pipeline handler has selected for the configuration.
+ * The \a params parameter provides a list of available controls for the
+ * ISP, sensor and lens devices, and the user requested transform
+ * operation. It can also provide platform specific configuration
+ * parameters, e.g. the lens shading table memory handle for VC4.
*
- * The \a ipaConfig and \a controls parameters carry data passed by the
- * pipeline handler to the IPA and back.
+ * The \a result parameter returns the available controls for the given
+ * camera mode, a list of controls to apply to the sensor device, and
+ * the requested mode's sensitivity characteristics.
*/
- configure(libcamera.IPACameraSensorInfo sensorInfo,
- IPAConfig ipaConfig)
- => (int32 ret, libcamera.ControlList controls, IPAConfigResult result);
+ configure(libcamera.IPACameraSensorInfo sensorInfo, ConfigParams params)
+ => (int32 ret, ConfigResult result);
/**
* \fn mapBuffers()
@@ -99,7 +158,7 @@ interface IPARPiInterface {
* depending on the IPA protocol. Regardless of the protocol, all
* buffers mapped at a given time shall have unique numerical IDs.
*
- * The numerical IDs have no meaning defined by the IPA interface, and
+ * The numerical IDs have no meaning defined by the IPA interface, and
* should be treated as opaque handles by IPAs, with the only exception
* that ID zero is invalid.
*
@@ -119,17 +178,118 @@ interface IPARPiInterface {
*/
unmapBuffers(array<uint32> ids);
- [async] signalStatReady(uint32 bufferId, uint32 ipaContext);
- [async] signalQueueRequest(libcamera.ControlList controls);
- [async] signalIspPrepare(ISPConfig data);
+ /**
+ * \fn prepareIsp()
+ * \brief Prepare the ISP configuration for a frame
+ * \param[in] params Parameter set for the frame to process
+ *
+ * This function calls into all the algorithms in preparation for the
+ * frame to be processed by the ISP.
+ *
+ * The \a params parameter lists the buffer IDs for the Bayer and
+ * embedded data buffers, a ControlList of sensor frame parameters, and
+ * a ControlList of request controls for the current frame.
+ *
+ * Additionally, \a params contains the IPA context (ipaContext) to
+ * use as an index location to store control algorithm results, and a
+ * historical IPA context (delayContext) that was active when the sensor
+ * settings were requested by the IPA.
+ */
+ [async] prepareIsp(PrepareParams params);
+
+ /**
+ * \fn processStats()
+ * \brief Process the statistics provided by the ISP
+ * \param[in] params Parameter set for the statistics to process
+ *
+ * This function calls into all the algorithms to provide the statistics
+ * generated by the ISP for the processed frame.
+ *
+ * The \a params parameter lists the buffer ID for the statistics buffer
+ * and an IPA context (ipaContext) to use as an index location to store
+ * algorithm results.
+ */
+ [async] processStats(ProcessParams params);
};
interface IPARPiEventInterface {
- statsMetadataComplete(uint32 bufferId, libcamera.ControlList controls);
- runIsp(uint32 bufferId);
- embeddedComplete(uint32 bufferId);
+ /**
+ * \fn prepareIspComplete()
+ * \brief Signal completion of \a prepareIsp
+ * \param[in] buffers Bayer and embedded buffers actioned.
+ *
+ * This asynchronous event is signalled to the pipeline handler once
+ * the \a prepareIsp signal has completed, and the ISP is ready to start
+ * processing the frame. The embedded data buffer may be recycled after
+ * this event.
+ */
+ prepareIspComplete(BufferIds buffers);
+
+ /**
+ * \fn processStatsComplete()
+ * \brief Signal completion of \a processStats
+ * \param[in] buffers Statistics buffers actioned.
+ *
+ * This asynchronous event is signalled to the pipeline handler once
+ * the \a processStats signal has completed. The statistics buffer may
+ * be recycled after this event.
+ */
+ processStatsComplete(BufferIds buffers);
+
+ /**
+ * \fn metadataReady()
+ * \brief Signal that request metadata is to be merged
+ * \param[in] metadata Control list of metadata to be merged
+ *
+ * This asynchronous event is signalled to the pipeline handler once
+ * all the frame metadata has been gathered. The pipeline handler will
+ * copy or merge this metadata into the \a Request returned to the
+ * application.
+ */
+ metadataReady(libcamera.ControlList metadata);
+
+ /**
+ * \fn setIspControls()
+ * \brief Signal ISP controls to be applied.
+ * \param[in] controls List of controls to be applied.
+ *
+ * This asynchronous event is signalled to the pipeline handler during
+ * the \a prepareIsp signal after all algorithms have been run and the
+ * IPA requires ISP controls to be applied for the frame.
+ */
setIspControls(libcamera.ControlList controls);
+
+ /**
+ * \fn setDelayedControls()
+ * \brief Signal sensor controls to be applied.
+ * \param[in] controls List of controls to be applied.
+ * \param[in] delayContext IPA context index used for this request
+ *
+ * This asynchronous event is signalled to the pipeline handler when
+ * the IPA requires sensor specific controls (e.g. shutter speed, gain,
+ * blanking) to be applied.
+ */
setDelayedControls(libcamera.ControlList controls, uint32 delayContext);
+
+ /**
+ * \fn setLensControls()
+ * \brief Signal lens controls to be applied.
+ * \param[in] controls List of controls to be applied.
+ *
+ * This asynchronous event is signalled to the pipeline handler when
+ * the IPA requires a lens movement control to be applied.
+ */
setLensControls(libcamera.ControlList controls);
+
+ /**
+ * \fn setCameraTimeout()
+ * \brief Request a watchdog timeout value to use
+ * \param[in] maxFrameLengthMs Timeout value in ms
+ *
+ * This asynchronous event is used by the IPA to inform the pipeline
+ * handler of an acceptable watchdog timer value to use for the sensor
+ * stream. This value is based on the history of frame lengths requested
+ * by the IPA.
+ */
setCameraTimeout(uint32 maxFrameLengthMs);
};
@@ -136,30 +136,28 @@ public:
munmap(lsTable_, MaxLsGridSize);
}
- int init(const IPASettings &settings, bool lensPresent, IPAInitResult *result) override;
- void start(const ControlList &controls, StartConfig *startConfig) override;
+ int init(const IPASettings &settings, const InitParams &params, InitResult *result) override;
+ void start(const ControlList &controls, StartResult *result) override;
void stop() override {}
- int configure(const IPACameraSensorInfo &sensorInfo, const IPAConfig &data,
- ControlList *controls, IPAConfigResult *result) override;
+ int configure(const IPACameraSensorInfo &sensorInfo, const ConfigParams &params,
+ ConfigResult *result) override;
void mapBuffers(const std::vector<IPABuffer> &buffers) override;
void unmapBuffers(const std::vector<unsigned int> &ids) override;
- void signalStatReady(const uint32_t bufferId, uint32_t ipaContext) override;
- void signalQueueRequest(const ControlList &controls) override;
- void signalIspPrepare(const ISPConfig &data) override;
+ void prepareIsp(const PrepareParams &params) override;
+ void processStats(const ProcessParams &params) override;
private:
void setMode(const IPACameraSensorInfo &sensorInfo);
bool validateSensorControls();
bool validateIspControls();
bool validateLensControls();
- void queueRequest(const ControlList &controls);
- void returnEmbeddedBuffer(unsigned int bufferId);
- void prepareISP(const ISPConfig &data);
+ void applyControls(const ControlList &controls);
+ void prepare(const PrepareParams &params);
void reportMetadata(unsigned int ipaContext);
void fillDeviceStatus(const ControlList &sensorControls, unsigned int ipaContext);
RPiController::StatisticsPtr fillStatistics(bcm2835_isp_stats *stats) const;
- void processStats(unsigned int bufferId, unsigned int ipaContext);
+ void process(unsigned int bufferId, unsigned int ipaContext);
void setCameraTimeoutValue();
void applyFrameDurations(Duration minFrameDuration, Duration maxFrameDuration);
void applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls);
@@ -229,7 +227,7 @@ private:
Duration lastTimeout_;
};
-int IPARPi::init(const IPASettings &settings, bool lensPresent, IPAInitResult *result)
+int IPARPi::init(const IPASettings &settings, const InitParams &params, InitResult *result)
{
/*
* Load the "helper" for this sensor. This tells us all the device specific stuff
@@ -274,7 +272,7 @@ int IPARPi::init(const IPASettings &settings, bool lensPresent, IPAInitResult *r
return -EINVAL;
}
- lensPresent_ = lensPresent;
+ lensPresent_ = params.lensPresent;
controller_.initialise();
@@ -287,14 +285,13 @@ int IPARPi::init(const IPASettings &settings, bool lensPresent, IPAInitResult *r
return 0;
}
-void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
+void IPARPi::start(const ControlList &controls, StartResult *result)
{
RPiController::Metadata metadata;
- ASSERT(startConfig);
if (!controls.empty()) {
/* We have been given some controls to action before start. */
- queueRequest(controls);
+ applyControls(controls);
}
controller_.switchMode(mode_, &metadata);
@@ -313,7 +310,7 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
if (agcStatus.shutterTime && agcStatus.analogueGain) {
ControlList ctrls(sensorCtrls_);
applyAGC(&agcStatus, ctrls);
- startConfig->controls = std::move(ctrls);
+ result->controls = std::move(ctrls);
setCameraTimeoutValue();
}
@@ -360,7 +357,7 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
mistrustCount_ = helper_->mistrustFramesModeSwitch();
}
- startConfig->dropFrameCount = dropFrameCount_;
+ result->dropFrameCount = dropFrameCount_;
firstStart_ = false;
lastRunTimestamp_ = 0;
@@ -435,11 +432,11 @@ void IPARPi::setMode(const IPACameraSensorInfo &sensorInfo)
mode_.minFrameDuration, mode_.maxFrameDuration);
}
-int IPARPi::configure(const IPACameraSensorInfo &sensorInfo, const IPAConfig &ipaConfig,
- ControlList *controls, IPAConfigResult *result)
+int IPARPi::configure(const IPACameraSensorInfo &sensorInfo, const ConfigParams &params,
+ ConfigResult *result)
{
- sensorCtrls_ = ipaConfig.sensorControls;
- ispCtrls_ = ipaConfig.ispControls;
+ sensorCtrls_ = params.sensorControls;
+ ispCtrls_ = params.ispControls;
if (!validateSensorControls()) {
LOG(IPARPI, Error) << "Sensor control validation failed.";
@@ -452,7 +449,7 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo, const IPAConfig &ip
}
if (lensPresent_) {
- lensCtrls_ = ipaConfig.lensControls;
+ lensCtrls_ = params.lensControls;
if (!validateLensControls()) {
LOG(IPARPI, Warning) << "Lens validation failed, "
<< "no lens control will be available.";
@@ -466,10 +463,10 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo, const IPAConfig &ip
/* Re-assemble camera mode using the sensor info. */
setMode(sensorInfo);
- mode_.transform = static_cast<libcamera::Transform>(ipaConfig.transform);
+ mode_.transform = static_cast<libcamera::Transform>(params.transform);
/* Store the lens shading table pointer and handle if available. */
- if (ipaConfig.lsTableHandle.isValid()) {
+ if (params.lsTableHandle.isValid()) {
/* Remove any previous table, if there was one. */
if (lsTable_) {
munmap(lsTable_, MaxLsGridSize);
@@ -477,7 +474,7 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo, const IPAConfig &ip
}
/* Map the LS table buffer into user space. */
- lsTableHandle_ = std::move(ipaConfig.lsTableHandle);
+ lsTableHandle_ = std::move(params.lsTableHandle);
if (lsTableHandle_.isValid()) {
lsTable_ = mmap(nullptr, MaxLsGridSize, PROT_READ | PROT_WRITE,
MAP_SHARED, lsTableHandle_.get(), 0);
@@ -512,8 +509,7 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo, const IPAConfig &ip
applyAGC(&agcStatus, ctrls);
}
- ASSERT(controls);
- *controls = std::move(ctrls);
+ result->controls = std::move(ctrls);
/*
* Apply the correct limits to the exposure, gain and frame duration controls
@@ -560,37 +556,34 @@ void IPARPi::unmapBuffers(const std::vector<unsigned int> &ids)
}
}
-void IPARPi::signalStatReady(uint32_t bufferId, uint32_t ipaContext)
+void IPARPi::processStats(const ProcessParams &params)
{
- unsigned int context = ipaContext % rpiMetadata_.size();
+ unsigned int context = params.ipaContext % rpiMetadata_.size();
if (++checkCount_ != frameCount_) /* assert here? */
LOG(IPARPI, Error) << "WARNING: Prepare/Process mismatch!!!";
if (processPending_ && frameCount_ > mistrustCount_)
- processStats(bufferId, context);
+ process(params.buffers.stats, context);
reportMetadata(context);
-
- statsMetadataComplete.emit(bufferId, libcameraMetadata_);
+ processStatsComplete.emit(params.buffers);
}
-void IPARPi::signalQueueRequest(const ControlList &controls)
-{
- queueRequest(controls);
-}
-void IPARPi::signalIspPrepare(const ISPConfig &data)
+void IPARPi::prepareIsp(const PrepareParams &params)
{
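+ /* Process the user controls for this request before running the algorithms. */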
+ applyControls(params.requestControls);
+
/*
* At start-up, or after a mode-switch, we may want to
* avoid running the control algos for a few frames in case
* they are "unreliable".
*/
- prepareISP(data);
+ prepare(params);
frameCount_++;
/* Ready to push the input buffer into the ISP. */
- runIsp.emit(data.bayerBufferId);
+ prepareIspComplete.emit(params.buffers);
}
void IPARPi::reportMetadata(unsigned int ipaContext)
@@ -703,6 +696,8 @@ void IPARPi::reportMetadata(unsigned int ipaContext)
libcameraMetadata_.set(controls::AfState, s);
libcameraMetadata_.set(controls::AfPauseState, p);
}
+
+ metadataReady.emit(libcameraMetadata_);
}
bool IPARPi::validateSensorControls()
@@ -826,7 +821,7 @@ static const std::map<int32_t, RPiController::AfAlgorithm::AfPause> AfPauseTable
{ controls::AfPauseResume, RPiController::AfAlgorithm::AfPauseResume },
};
-void IPARPi::queueRequest(const ControlList &controls)
+void IPARPi::applyControls(const ControlList &controls)
{
using RPiController::AfAlgorithm;
@@ -1256,27 +1251,22 @@ void IPARPi::queueRequest(const ControlList &controls)
}
}
-void IPARPi::returnEmbeddedBuffer(unsigned int bufferId)
+void IPARPi::prepare(const PrepareParams &params)
{
- embeddedComplete.emit(bufferId);
-}
-
-void IPARPi::prepareISP(const ISPConfig &data)
-{
- int64_t frameTimestamp = data.controls.get(controls::SensorTimestamp).value_or(0);
- unsigned int ipaContext = data.ipaContext % rpiMetadata_.size();
+ int64_t frameTimestamp = params.sensorControls.get(controls::SensorTimestamp).value_or(0);
+ unsigned int ipaContext = params.ipaContext % rpiMetadata_.size();
RPiController::Metadata &rpiMetadata = rpiMetadata_[ipaContext];
Span<uint8_t> embeddedBuffer;
rpiMetadata.clear();
- fillDeviceStatus(data.controls, ipaContext);
+ fillDeviceStatus(params.sensorControls, ipaContext);
- if (data.embeddedBufferPresent) {
+ if (params.buffers.embedded) {
/*
* Pipeline handler has supplied us with an embedded data buffer,
* we must pass it to the CamHelper for parsing.
*/
- auto it = buffers_.find(data.embeddedBufferId);
+ auto it = buffers_.find(params.buffers.embedded);
ASSERT(it != buffers_.end());
embeddedBuffer = it->second.planes()[0];
}
@@ -1288,7 +1278,7 @@ void IPARPi::prepareISP(const ISPConfig &data)
* metadata.
*/
AgcStatus agcStatus;
- RPiController::Metadata &delayedMetadata = rpiMetadata_[data.delayContext];
+ RPiController::Metadata &delayedMetadata = rpiMetadata_[params.delayContext];
if (!delayedMetadata.get<AgcStatus>("agc.status", agcStatus))
rpiMetadata.set("agc.delayed_status", agcStatus);
@@ -1298,10 +1288,6 @@ void IPARPi::prepareISP(const ISPConfig &data)
*/
helper_->prepare(embeddedBuffer, rpiMetadata);
- /* Done with embedded data now, return to pipeline handler asap. */
- if (data.embeddedBufferPresent)
- returnEmbeddedBuffer(data.embeddedBufferId);
-
/* Allow a 10% margin on the comparison below. */
Duration delta = (frameTimestamp - lastRunTimestamp_) * 1.0ns;
if (lastRunTimestamp_ && frameCount_ > dropFrameCount_ &&
@@ -1445,7 +1431,7 @@ RPiController::StatisticsPtr IPARPi::fillStatistics(bcm2835_isp_stats *stats) co
return statistics;
}
-void IPARPi::processStats(unsigned int bufferId, unsigned int ipaContext)
+void IPARPi::process(unsigned int bufferId, unsigned int ipaContext)
{
RPiController::Metadata &rpiMetadata = rpiMetadata_[ipaContext];
@@ -200,15 +200,15 @@ public:
void freeBuffers();
void frameStarted(uint32_t sequence);
- int loadIPA(ipa::RPi::IPAInitResult *result);
- int configureIPA(const CameraConfiguration *config, ipa::RPi::IPAConfigResult *result);
+ int loadIPA(ipa::RPi::InitResult *result);
+ int configureIPA(const CameraConfiguration *config, ipa::RPi::ConfigResult *result);
int loadPipelineConfiguration();
void enumerateVideoDevices(MediaLink *link);
- void statsMetadataComplete(uint32_t bufferId, const ControlList &controls);
- void runIsp(uint32_t bufferId);
- void embeddedComplete(uint32_t bufferId);
+ void processStatsComplete(const ipa::RPi::BufferIds &buffers);
+ void metadataReady(const ControlList &metadata);
+ void prepareIspComplete(const ipa::RPi::BufferIds &buffers);
void setIspControls(const ControlList &controls);
void setDelayedControls(const ControlList &controls, uint32_t delayContext);
void setLensControls(const ControlList &controls);
@@ -238,7 +238,7 @@ public:
/* The vector below is just for convenience when iterating over all streams. */
std::vector<RPi::Stream *> streams_;
/* Stores the ids of the buffers mapped in the IPA. */
- std::unordered_set<unsigned int> ipaBuffers_;
+ std::unordered_set<unsigned int> bufferIds_;
/*
* Stores a cascade of Video Mux or Bridge devices between the sensor and
* Unicam together with media link across the entities.
@@ -1000,7 +1000,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
data->isp_[Isp::Input].dev()->setSelection(V4L2_SEL_TGT_CROP, &crop);
- ipa::RPi::IPAConfigResult result;
+ ipa::RPi::ConfigResult result;
ret = data->configureIPA(config, &result);
if (ret)
LOG(RPI, Error) << "Failed to configure the IPA: " << ret;
@@ -1117,17 +1117,17 @@ int PipelineHandlerRPi::start(Camera *camera, const ControlList *controls)
data->applyScalerCrop(*controls);
/* Start the IPA. */
- ipa::RPi::StartConfig startConfig;
+ ipa::RPi::StartResult result;
data->ipa_->start(controls ? *controls : ControlList{ controls::controls },
- &startConfig);
+ &result);
/* Apply any gain/exposure settings that the IPA may have passed back. */
- if (!startConfig.controls.empty())
- data->setSensorControls(startConfig.controls);
+ if (!result.controls.empty())
+ data->setSensorControls(result.controls);
/* Configure the number of dropped frames required on startup. */
data->dropFrameCount_ = data->config_.disableStartupFrameDrops
- ? 0 : startConfig.dropFrameCount;
+ ? 0 : result.dropFrameCount;
for (auto const stream : data->streams_)
stream->resetBuffers();
@@ -1358,7 +1358,7 @@ int PipelineHandlerRPi::registerCamera(MediaDevice *unicam, MediaDevice *isp, Me
data->sensorFormats_ = populateSensorFormats(data->sensor_);
- ipa::RPi::IPAInitResult result;
+ ipa::RPi::InitResult result;
if (data->loadIPA(&result)) {
LOG(RPI, Error) << "Failed to load a suitable IPA library";
return -EINVAL;
@@ -1599,7 +1599,7 @@ int PipelineHandlerRPi::prepareBuffers(Camera *camera)
void PipelineHandlerRPi::mapBuffers(Camera *camera, const RPi::BufferMap &buffers, unsigned int mask)
{
RPiCameraData *data = cameraData(camera);
- std::vector<IPABuffer> ipaBuffers;
+ std::vector<IPABuffer> bufferIds;
/*
* Link the FrameBuffers with the id (key value) in the map stored in
* the RPi stream object - along with an identifier mask.
@@ -1608,12 +1608,12 @@ void PipelineHandlerRPi::mapBuffers(Camera *camera, const RPi::BufferMap &buffer
* handler and the IPA.
*/
for (auto const &it : buffers) {
- ipaBuffers.push_back(IPABuffer(mask | it.first,
+ bufferIds.push_back(IPABuffer(mask | it.first,
it.second->planes()));
- data->ipaBuffers_.insert(mask | it.first);
+ data->bufferIds_.insert(mask | it.first);
}
- data->ipa_->mapBuffers(ipaBuffers);
+ data->ipa_->mapBuffers(bufferIds);
}
void RPiCameraData::freeBuffers()
@@ -1623,10 +1623,10 @@ void RPiCameraData::freeBuffers()
* Copy the buffer ids from the unordered_set to a vector to
* pass to the IPA.
*/
- std::vector<unsigned int> ipaBuffers(ipaBuffers_.begin(),
- ipaBuffers_.end());
- ipa_->unmapBuffers(ipaBuffers);
- ipaBuffers_.clear();
+ std::vector<unsigned int> bufferIds(bufferIds_.begin(),
+ bufferIds_.end());
+ ipa_->unmapBuffers(bufferIds);
+ bufferIds_.clear();
}
for (auto const stream : streams_)
@@ -1643,16 +1643,16 @@ void RPiCameraData::frameStarted(uint32_t sequence)
delayedCtrls_->applyControls(sequence);
}
-int RPiCameraData::loadIPA(ipa::RPi::IPAInitResult *result)
+int RPiCameraData::loadIPA(ipa::RPi::InitResult *result)
{
ipa_ = IPAManager::createIPA<ipa::RPi::IPAProxyRPi>(pipe(), 1, 1);
if (!ipa_)
return -ENOENT;
- ipa_->statsMetadataComplete.connect(this, &RPiCameraData::statsMetadataComplete);
- ipa_->runIsp.connect(this, &RPiCameraData::runIsp);
- ipa_->embeddedComplete.connect(this, &RPiCameraData::embeddedComplete);
+ ipa_->processStatsComplete.connect(this, &RPiCameraData::processStatsComplete);
+ ipa_->prepareIspComplete.connect(this, &RPiCameraData::prepareIspComplete);
+ ipa_->metadataReady.connect(this, &RPiCameraData::metadataReady);
ipa_->setIspControls.connect(this, &RPiCameraData::setIspControls);
ipa_->setDelayedControls.connect(this, &RPiCameraData::setDelayedControls);
ipa_->setLensControls.connect(this, &RPiCameraData::setLensControls);
@@ -1674,23 +1674,25 @@ int RPiCameraData::loadIPA(ipa::RPi::IPAInitResult *result)
}
IPASettings settings(configurationFile, sensor_->model());
+ ipa::RPi::InitParams params;
- return ipa_->init(settings, !!sensor_->focusLens(), result);
+ params.lensPresent = !!sensor_->focusLens();
+ return ipa_->init(settings, params, result);
}
-int RPiCameraData::configureIPA(const CameraConfiguration *config, ipa::RPi::IPAConfigResult *result)
+int RPiCameraData::configureIPA(const CameraConfiguration *config, ipa::RPi::ConfigResult *result)
{
std::map<unsigned int, ControlInfoMap> entityControls;
- ipa::RPi::IPAConfig ipaConfig;
+ ipa::RPi::ConfigParams params;
/* \todo Move passing of ispControls and lensControls to ipa::init() */
- ipaConfig.sensorControls = sensor_->controls();
- ipaConfig.ispControls = isp_[Isp::Input].dev()->controls();
+ params.sensorControls = sensor_->controls();
+ params.ispControls = isp_[Isp::Input].dev()->controls();
if (sensor_->focusLens())
- ipaConfig.lensControls = sensor_->focusLens()->controls();
+ params.lensControls = sensor_->focusLens()->controls();
/* Always send the user transform to the IPA. */
- ipaConfig.transform = static_cast<unsigned int>(config->transform);
+ params.transform = static_cast<unsigned int>(config->transform);
/* Allocate the lens shading table via dmaHeap and pass to the IPA. */
if (!lsTable_.isValid()) {
@@ -1703,7 +1705,7 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config, ipa::RPi::IPA
* \todo Investigate if mapping the lens shading table buffer
* could be handled with mapBuffers().
*/
- ipaConfig.lsTableHandle = lsTable_;
+ params.lsTableHandle = lsTable_;
}
/* We store the IPACameraSensorInfo for digital zoom calculations. */
@@ -1714,15 +1716,14 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config, ipa::RPi::IPA
}
/* Ready the IPA - it must know about the sensor resolution. */
- ControlList controls;
- ret = ipa_->configure(sensorInfo_, ipaConfig, &controls, result);
+ ret = ipa_->configure(sensorInfo_, params, result);
if (ret < 0) {
LOG(RPI, Error) << "IPA configuration failed!";
return -EPIPE;
}
- if (!controls.empty())
- setSensorControls(controls);
+ if (!result->controls.empty())
+ setSensorControls(result->controls);
return 0;
}
@@ -1883,24 +1884,32 @@ void RPiCameraData::enumerateVideoDevices(MediaLink *link)
}
}
-void RPiCameraData::statsMetadataComplete(uint32_t bufferId, const ControlList &controls)
+void RPiCameraData::processStatsComplete(const ipa::RPi::BufferIds &buffers)
{
if (!isRunning())
return;
- FrameBuffer *buffer = isp_[Isp::Stats].getBuffers().at(bufferId & RPi::MaskID);
+ FrameBuffer *buffer = isp_[Isp::Stats].getBuffers().at(buffers.stats & RPi::MaskID);
handleStreamBuffer(buffer, &isp_[Isp::Stats]);
+ state_ = State::IpaComplete;
+ handleState();
+}
+
+void RPiCameraData::metadataReady(const ControlList &metadata)
+{
+ if (!isRunning())
+ return;
/* Add to the Request metadata buffer what the IPA has provided. */
Request *request = requestQueue_.front();
- request->metadata().merge(controls);
+ request->metadata().merge(metadata);
/*
* Inform the sensor of the latest colour gains if it has the
* V4L2_CID_NOTIFY_GAINS control (which means notifyGainsUnity_ is set).
*/
- const auto &colourGains = controls.get(libcamera::controls::ColourGains);
+ const auto &colourGains = metadata.get(libcamera::controls::ColourGains);
if (notifyGainsUnity_ && colourGains) {
/* The control wants linear gains in the order B, Gb, Gr, R. */
ControlList ctrls(sensor_->controls());
@@ -1914,33 +1923,29 @@ void RPiCameraData::statsMetadataComplete(uint32_t bufferId, const ControlList &
sensor_->setControls(&ctrls);
}
-
- state_ = State::IpaComplete;
- handleState();
}
-void RPiCameraData::runIsp(uint32_t bufferId)
+void RPiCameraData::prepareIspComplete(const ipa::RPi::BufferIds &buffers)
{
+ unsigned int embeddedId = buffers.embedded & RPi::MaskID;
+ unsigned int bayer = buffers.bayer & RPi::MaskID;
+ FrameBuffer *buffer;
+
if (!isRunning())
return;
- FrameBuffer *buffer = unicam_[Unicam::Image].getBuffers().at(bufferId & RPi::MaskID);
-
- LOG(RPI, Debug) << "Input re-queue to ISP, buffer id " << (bufferId & RPi::MaskID)
+ buffer = unicam_[Unicam::Image].getBuffers().at(bayer & RPi::MaskID);
+ LOG(RPI, Debug) << "Input re-queue to ISP, buffer id " << (bayer & RPi::MaskID)
<< ", timestamp: " << buffer->metadata().timestamp;
isp_[Isp::Input].queueBuffer(buffer);
ispOutputCount_ = 0;
- handleState();
-}
-void RPiCameraData::embeddedComplete(uint32_t bufferId)
-{
- if (!isRunning())
- return;
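+ /* Return the embedded data buffer, if present, now that the IPA has finished with it. */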
+ if (sensorMetadata_ && embeddedId) {
+ buffer = unicam_[Unicam::Embedded].getBuffers().at(embeddedId & RPi::MaskID);
+ handleStreamBuffer(buffer, &unicam_[Unicam::Embedded]);
+ }
- FrameBuffer *buffer = unicam_[Unicam::Embedded].getBuffers().at(bufferId & RPi::MaskID);
- handleStreamBuffer(buffer, &unicam_[Unicam::Embedded]);
handleState();
}
@@ -2116,8 +2121,10 @@ void RPiCameraData::ispOutputDequeue(FrameBuffer *buffer)
* application until after the IPA signals so.
*/
if (stream == &isp_[Isp::Stats]) {
- ipa_->signalStatReady(RPi::MaskStats | static_cast<unsigned int>(index),
- requestQueue_.front()->sequence());
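+ /* The request sequence number serves as the IPA context index for this frame. */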
+ ipa::RPi::ProcessParams params;
+ params.buffers.stats = index | RPi::MaskStats;
+ params.ipaContext = requestQueue_.front()->sequence();
+ ipa_->processStats(params);
} else {
/* Any other ISP output can be handed back to the application now. */
handleStreamBuffer(buffer, stream);
@@ -2344,38 +2351,30 @@ void RPiCameraData::tryRunPipeline()
request->metadata().clear();
fillRequestMetadata(bayerFrame.controls, request);
- /*
- * Process all the user controls by the IPA. Once this is complete, we
- * queue the ISP output buffer listed in the request to start the HW
- * pipeline.
- */
- ipa_->signalQueueRequest(request->controls());
-
/* Set our state to say the pipeline is active. */
state_ = State::Busy;
- unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerFrame.buffer);
+ unsigned int bayer = unicam_[Unicam::Image].getBufferId(bayerFrame.buffer);
- LOG(RPI, Debug) << "Signalling signalIspPrepare:"
- << " Bayer buffer id: " << bayerId;
+ LOG(RPI, Debug) << "Signalling prepareIsp:"
+ << " Bayer buffer id: " << bayer;
- ipa::RPi::ISPConfig ispPrepare;
- ispPrepare.bayerBufferId = RPi::MaskBayerData | bayerId;
- ispPrepare.controls = std::move(bayerFrame.controls);
- ispPrepare.ipaContext = request->sequence();
- ispPrepare.delayContext = bayerFrame.delayContext;
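+ /* Assemble the per-frame parameters for prepareIsp(). */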
+ ipa::RPi::PrepareParams params;
+ params.buffers.bayer = RPi::MaskBayerData | bayer;
+ params.sensorControls = std::move(bayerFrame.controls);
+ params.requestControls = request->controls();
+ params.ipaContext = request->sequence();
+ params.delayContext = bayerFrame.delayContext;
if (embeddedBuffer) {
unsigned int embeddedId = unicam_[Unicam::Embedded].getBufferId(embeddedBuffer);
- ispPrepare.embeddedBufferId = RPi::MaskEmbeddedData | embeddedId;
- ispPrepare.embeddedBufferPresent = true;
-
- LOG(RPI, Debug) << "Signalling signalIspPrepare:"
+ params.buffers.embedded = RPi::MaskEmbeddedData | embeddedId;
+ LOG(RPI, Debug) << "Signalling prepareIsp:"
<< " Embedded buffer id: " << embeddedId;
}
- ipa_->signalIspPrepare(ispPrepare);
+ ipa_->prepareIsp(params);
}
bool RPiCameraData::findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&embeddedBuffer)