@@ -31,14 +31,14 @@ interface IPAIPU3Interface {
unmapBuffers(array<uint32> ids);
[async] queueRequest(uint32 frame, libcamera.ControlList controls);
- [async] fillParamsBuffer(uint32 frame, uint32 bufferId);
- [async] processStatsBuffer(uint32 frame, int64 frameTimestamp,
- uint32 bufferId, libcamera.ControlList sensorControls);
+ [async] computeParams(uint32 frame, uint32 bufferId);
+ [async] processStats(uint32 frame, int64 frameTimestamp,
+ uint32 bufferId, libcamera.ControlList sensorControls);
};
interface IPAIPU3EventInterface {
setSensorControls(uint32 frame, libcamera.ControlList sensorControls,
libcamera.ControlList lensControls);
- paramsBufferReady(uint32 frame);
+ paramsComputed(uint32 frame);
metadataReady(uint32 frame, libcamera.ControlList metadata);
};
@@ -31,13 +31,13 @@ interface IPARkISP1Interface {
unmapBuffers(array<uint32> ids);
[async] queueRequest(uint32 frame, libcamera.ControlList reqControls);
- [async] fillParamsBuffer(uint32 frame, uint32 bufferId);
- [async] processStatsBuffer(uint32 frame, uint32 bufferId,
- libcamera.ControlList sensorControls);
+ [async] computeParams(uint32 frame, uint32 bufferId);
+ [async] processStats(uint32 frame, uint32 bufferId,
+ libcamera.ControlList sensorControls);
};
interface IPARkISP1EventInterface {
- paramsBufferReady(uint32 frame, uint32 bytesused);
+ paramsComputed(uint32 frame, uint32 bytesused);
setSensorControls(uint32 frame, libcamera.ControlList sensorControls);
metadataReady(uint32 frame, libcamera.ControlList metadata);
};
@@ -24,7 +24,7 @@ interface IPASoftInterface {
=> (int32 ret);
[async] queueRequest(uint32 frame, libcamera.ControlList sensorControls);
- [async] fillParamsBuffer(uint32 frame);
+ [async] computeParams(uint32 frame);
[async] processStats(uint32 frame,
uint32 bufferId,
libcamera.ControlList sensorControls);
@@ -47,9 +47,9 @@ interface IPAVimcInterface {
 * interface functions that mimic how other pipeline handlers typically
* handle parameters at runtime.
*/
- [async] fillParamsBuffer(uint32 frame, uint32 bufferId);
+ [async] computeParams(uint32 frame, uint32 bufferId);
};
interface IPAVimcEventInterface {
- paramsBufferReady(uint32 bufferId, [flags] TestFlag flags);
+ paramsComputed(uint32 bufferId, [flags] TestFlag flags);
};
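The calling convention is unchanged by this series; only the names move from buffer-centric to operation-centric terms. As a minimal sketch of the pipeline-handler side of the renamed IPU3 interface, assuming a hypothetical MyCameraData class that owns the IPA proxy (the class, method and member names here are illustrative, not the in-tree code):

    /* Hook up the renamed result signals when the IPA is loaded. */
    void MyCameraData::connectIPA()
    {
            ipa_->paramsComputed.connect(this, &MyCameraData::paramsComputed);
            ipa_->metadataReady.connect(this, &MyCameraData::metadataReady);
    }

    /* Per frame: ask the IPA to compute ISP parameters (asynchronous)... */
    void MyCameraData::onRawFrameCaptured(uint32_t frame, uint32_t paramCookie)
    {
            ipa_->computeParams(frame, paramCookie);
    }

    /* ...and hand the statistics back once the ISP has produced them. */
    void MyCameraData::onStatsCaptured(uint32_t frame, int64_t timestamp,
                                       uint32_t statCookie,
                                       const ControlList &sensorControls)
    {
            ipa_->processStats(frame, timestamp, statCookie, sensorControls);
    }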
@@ -27,8 +27,8 @@ from applications, and managing events from the pipeline handler.
└─┬───┬───┬──────┬────┬────┬────┬─┴────▼─┬──┘ 1: init()
│ │ │ │ ▲ │ ▲ │ ▲ │ ▲ │ 2: configure()
│1 │2 │3 │4│ │4│ │4│ │4│ │5 3: mapBuffers(), start()
- │ │ │ │ │ │ │ │ │ │ │ │ 4: (▼) queueRequest(), fillParamsBuffer(), processStatsBuffer()
- ▼ ▼ ▼ ▼ │ ▼ │ ▼ │ ▼ │ ▼ (▲) setSensorControls, paramsBufferReady, metadataReady Signals
+ │ │ │ │ │ │ │ │ │ │ │ │ 4: (▼) queueRequest(), computeParams(), processStats()
+ ▼ ▼ ▼ ▼ │ ▼ │ ▼ │ ▼ │ ▼ (▲) setSensorControls, paramsComputed, metadataReady Signals
┌──────────────────┴────┴────┴────┴─────────┐ 5: stop(), unmapBuffers()
│ IPU3 IPA │
│ ┌───────────────────────┐ │
@@ -104,8 +104,8 @@ to operate when running:
- configure()
- queueRequest()
-- fillParamsBuffer()
-- processStatsBuffer()
+- computeParams()
+- processStats()
The configuration phase allows the pipeline-handler to inform the IPA of
the current stream configurations, which is then passed into each
@@ -119,7 +119,7 @@ When configured, the IPA is notified by the pipeline handler of the
Camera ``start()`` event, after which incoming requests will be queued
for processing, requiring a parameter buffer (``ipu3_uapi_params``) to
be populated for the ImgU. This is given to the IPA through
-``fillParamsBuffer()``, and then passed directly to each algorithm
+``computeParams()``, and then passed directly to each algorithm
through the ``prepare()`` call allowing the ISP configuration to be
updated for the needs of each component that the algorithm is
responsible for.
@@ -129,7 +129,7 @@ structure that it modifies, and it should take care to ensure that any
structure set by a use flag is fully initialised to suitable values.
The parameter buffer is returned to the pipeline handler through the
-``paramsBufferReady`` signal, and from there queued to the ImgU along
+``paramsComputed`` signal, and from there queued to the ImgU along
with a raw frame captured with the CIO2.
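As a sketch of the use-flag rule above, assuming the IPU3 Algorithm prepare() hook called later in this series (the gamma block and the gammaConfig_ member are illustrative, not a specific in-tree algorithm):

    void MyGamma::prepare(IPAContext &context, const uint32_t frame,
                          IPAFrameContext &frameContext,
                          ipu3_uapi_params *params)
    {
            /* Mark only the block this algorithm owns as valid... */
            params->use.acc_gamma = 1;
            /* ...and fully initialise that block, leaving the rest untouched. */
            params->acc_param.gamma = gammaConfig_;
    }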
Post-frame completion
@@ -138,7 +138,7 @@ Post-frame completion
When the capture of an image is completed, and successfully processed
through the ImgU, the generated statistics buffer
(``ipu3_uapi_stats_3a``) is given to the IPA through
-``processStatsBuffer()``. This provides the IPA with an opportunity to
+``processStats()``. This provides the IPA with an opportunity to
examine the results of the ISP and run the calculations required by each
algorithm on the new data. The algorithms may require context from the
operations of other algorithms, for example, the AWB might choose to use
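A sketch of the statistics side, assuming the module interface's process() hook; estimateGains() and the activeState members named here are hypothetical:

    void MyAwb::process(IPAContext &context, const uint32_t frame,
                        IPAFrameContext &frameContext,
                        const ipu3_uapi_stats_3a *stats,
                        ControlList &metadata)
    {
            /* Reuse state another algorithm (e.g. AGC) left in the context... */
            double gain = context.activeState.agc.gain;
            /* ...parse the newly delivered statistics... */
            const auto gains = estimateGains(stats, gain);
            /* ...and cache the result for the next computeParams()/prepare(). */
            context.activeState.awb.gains = gains;
    }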
@@ -87,14 +87,14 @@ namespace ipa::ipu3 {
* parameter buffer, and adapting the settings of the sensor attached to the
* IPU3 CIO2 through sensor-specific V4L2 controls.
*
- * In fillParamsBuffer(), we populate the ImgU parameter buffer with
+ * In computeParams(), we populate the ImgU parameter buffer with
* settings to configure the device in preparation for handling the frame
* queued in the Request.
*
* When the frame has completed processing, the ImgU will generate a statistics
- * buffer which is given to the IPA with processStatsBuffer(). In this we run the
+ * buffer which is given to the IPA with processStats(). In this we run the
* algorithms to parse the statistics and cache any results for the next
- * fillParamsBuffer() call.
+ * computeParams() call.
*
* The individual algorithms are split into modular components that are called
* iteratively to allow them to process statistics from the ImgU in the order
@@ -155,10 +155,10 @@ public:
void unmapBuffers(const std::vector<unsigned int> &ids) override;
void queueRequest(const uint32_t frame, const ControlList &controls) override;
- void fillParamsBuffer(const uint32_t frame, const uint32_t bufferId) override;
- void processStatsBuffer(const uint32_t frame, const int64_t frameTimestamp,
- const uint32_t bufferId,
- const ControlList &sensorControls) override;
+ void computeParams(const uint32_t frame, const uint32_t bufferId) override;
+ void processStats(const uint32_t frame, const int64_t frameTimestamp,
+ const uint32_t bufferId,
+ const ControlList &sensorControls) override;
protected:
std::string logPrefix() const override;
@@ -538,7 +538,7 @@ void IPAIPU3::unmapBuffers(const std::vector<unsigned int> &ids)
* Algorithms are expected to fill the IPU3 parameter buffer for the next
* frame given their most recent processing of the ImgU statistics.
*/
-void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
+void IPAIPU3::computeParams(const uint32_t frame, const uint32_t bufferId)
{
auto it = buffers_.find(bufferId);
if (it == buffers_.end()) {
@@ -566,7 +566,7 @@ void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
for (auto const &algo : algorithms())
algo->prepare(context_, frame, frameContext, params);
- paramsBufferReady.emit(frame);
+ paramsComputed.emit(frame);
}
/**
@@ -580,9 +580,9 @@ void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
* statistics are passed to each algorithm module to run their calculations and
* update their state accordingly.
*/
-void IPAIPU3::processStatsBuffer(const uint32_t frame,
- [[maybe_unused]] const int64_t frameTimestamp,
- const uint32_t bufferId, const ControlList &sensorControls)
+void IPAIPU3::processStats(const uint32_t frame,
+ [[maybe_unused]] const int64_t frameTimestamp,
+ const uint32_t bufferId, const ControlList &sensorControls)
{
auto it = buffers_.find(bufferId);
if (it == buffers_.end()) {
@@ -65,9 +65,9 @@ public:
void unmapBuffers(const std::vector<unsigned int> &ids) override;
void queueRequest(const uint32_t frame, const ControlList &controls) override;
- void fillParamsBuffer(const uint32_t frame, const uint32_t bufferId) override;
- void processStatsBuffer(const uint32_t frame, const uint32_t bufferId,
- const ControlList &sensorControls) override;
+ void computeParams(const uint32_t frame, const uint32_t bufferId) override;
+ void processStats(const uint32_t frame, const uint32_t bufferId,
+ const ControlList &sensorControls) override;
protected:
std::string logPrefix() const override;
@@ -335,7 +335,7 @@ void IPARkISP1::queueRequest(const uint32_t frame, const ControlList &controls)
}
}
-void IPARkISP1::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
+void IPARkISP1::computeParams(const uint32_t frame, const uint32_t bufferId)
{
IPAFrameContext &frameContext = context_.frameContexts.get(frame);
@@ -345,11 +345,11 @@ void IPARkISP1::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
for (auto const &algo : algorithms())
algo->prepare(context_, frame, frameContext, &params);
- paramsBufferReady.emit(frame, params.size());
+ paramsComputed.emit(frame, params.size());
}
-void IPARkISP1::processStatsBuffer(const uint32_t frame, const uint32_t bufferId,
- const ControlList &sensorControls)
+void IPARkISP1::processStats(const uint32_t frame, const uint32_t bufferId,
+ const ControlList &sensorControls)
{
IPAFrameContext &frameContext = context_.frameContexts.get(frame);
@@ -57,7 +57,7 @@ public:
void stop() override;
void queueRequest(const uint32_t frame, const ControlList &controls) override;
- void fillParamsBuffer(const uint32_t frame) override;
+ void computeParams(const uint32_t frame) override;
void processStats(const uint32_t frame, const uint32_t bufferId,
const ControlList &sensorControls) override;
@@ -272,7 +272,7 @@ void IPASoftSimple::queueRequest(const uint32_t frame, const ControlList &contro
algo->queueRequest(context_, frame, frameContext, controls);
}
-void IPASoftSimple::fillParamsBuffer(const uint32_t frame)
+void IPASoftSimple::computeParams(const uint32_t frame)
{
IPAFrameContext &frameContext = context_.frameContexts.get(frame);
for (auto const &algo : algorithms())
@@ -48,7 +48,7 @@ public:
void unmapBuffers(const std::vector<unsigned int> &ids) override;
void queueRequest(uint32_t frame, const ControlList &controls) override;
- void fillParamsBuffer(uint32_t frame, uint32_t bufferId) override;
+ void computeParams(uint32_t frame, uint32_t bufferId) override;
private:
void initTrace();
@@ -150,7 +150,7 @@ void IPAVimc::queueRequest([[maybe_unused]] uint32_t frame,
{
}
-void IPAVimc::fillParamsBuffer([[maybe_unused]] uint32_t frame, uint32_t bufferId)
+void IPAVimc::computeParams([[maybe_unused]] uint32_t frame, uint32_t bufferId)
{
auto it = buffers_.find(bufferId);
if (it == buffers_.end()) {
@@ -159,7 +159,7 @@ void IPAVimc::fillParamsBuffer([[maybe_unused]] uint32_t frame, uint32_t bufferI
}
Flags<ipa::vimc::TestFlag> flags;
- paramsBufferReady.emit(bufferId, flags);
+ paramsComputed.emit(bufferId, flags);
}
void IPAVimc::initTrace()
@@ -88,7 +88,7 @@ public:
private:
void metadataReady(unsigned int id, const ControlList &metadata);
- void paramsBufferReady(unsigned int id);
+ void paramsComputed(unsigned int id);
void setSensorControls(unsigned int id, const ControlList &sensorControls,
const ControlList &lensControls);
};
@@ -1156,7 +1156,7 @@ int IPU3CameraData::loadIPA()
return -ENOENT;
ipa_->setSensorControls.connect(this, &IPU3CameraData::setSensorControls);
- ipa_->paramsBufferReady.connect(this, &IPU3CameraData::paramsBufferReady);
+ ipa_->paramsComputed.connect(this, &IPU3CameraData::paramsComputed);
ipa_->metadataReady.connect(this, &IPU3CameraData::metadataReady);
/*
@@ -1217,7 +1217,7 @@ void IPU3CameraData::setSensorControls([[maybe_unused]] unsigned int id,
focusLens->setFocusPosition(focusValue.get<int32_t>());
}
-void IPU3CameraData::paramsBufferReady(unsigned int id)
+void IPU3CameraData::paramsComputed(unsigned int id)
{
IPU3Frames::Info *info = frameInfos_.find(id);
if (!info)
@@ -1328,7 +1328,7 @@ void IPU3CameraData::cio2BufferReady(FrameBuffer *buffer)
if (request->findBuffer(&rawStream_))
pipe()->completeBuffer(request, buffer);
- ipa_->fillParamsBuffer(info->id, info->paramBuffer->cookie());
+ ipa_->computeParams(info->id, info->paramBuffer->cookie());
}
void IPU3CameraData::paramBufferReady(FrameBuffer *buffer)
@@ -1372,8 +1372,8 @@ void IPU3CameraData::statBufferReady(FrameBuffer *buffer)
return;
}
- ipa_->processStatsBuffer(info->id, request->metadata().get(controls::SensorTimestamp).value_or(0),
- info->statBuffer->cookie(), info->effectiveSensorControls);
+ ipa_->processStats(info->id, request->metadata().get(controls::SensorTimestamp).value_or(0),
+ info->statBuffer->cookie(), info->effectiveSensorControls);
}
/*
@@ -464,7 +464,7 @@ public:
int queueRequestDevice(Camera *camera, Request *request) override;
- void bufferReady(FrameBuffer *buffer);
+ void imageBufferReady(FrameBuffer *buffer);
bool match(DeviceEnumerator *enumerator) override;
@@ -878,7 +878,7 @@ int PipelineHandlerMaliC55::queueRequestDevice(Camera *camera, Request *request)
return 0;
}
-void PipelineHandlerMaliC55::bufferReady(FrameBuffer *buffer)
+void PipelineHandlerMaliC55::imageBufferReady(FrameBuffer *buffer)
{
Request *request = buffer->request();
@@ -992,7 +992,7 @@ bool PipelineHandlerMaliC55::match(DeviceEnumerator *enumerator)
if (frPipe->cap->open() < 0)
return false;
- frPipe->cap->bufferReady.connect(this, &PipelineHandlerMaliC55::bufferReady);
+ frPipe->cap->bufferReady.connect(this, &PipelineHandlerMaliC55::imageBufferReady);
dsFitted_ = !!media_->getEntityByName("mali-c55 ds");
if (dsFitted_) {
@@ -1008,7 +1008,7 @@ bool PipelineHandlerMaliC55::match(DeviceEnumerator *enumerator)
if (dsPipe->cap->open() < 0)
return false;
- dsPipe->cap->bufferReady.connect(this, &PipelineHandlerMaliC55::bufferReady);
+ dsPipe->cap->bufferReady.connect(this, &PipelineHandlerMaliC55::imageBufferReady);
}
ispSink = isp_->entity()->getPadByIndex(0);
@@ -114,7 +114,7 @@ public:
ControlInfoMap ipaControls_;
private:
- void paramFilled(unsigned int frame, unsigned int bytesused);
+ void paramsComputed(unsigned int frame, unsigned int bytesused);
void setSensorControls(unsigned int frame,
const ControlList &sensorControls);
@@ -180,9 +180,9 @@ private:
const RkISP1CameraConfiguration &config);
int createCamera(MediaEntity *sensor);
void tryCompleteRequest(RkISP1FrameInfo *info);
- void bufferReady(FrameBuffer *buffer);
- void paramReady(FrameBuffer *buffer);
- void statReady(FrameBuffer *buffer);
+ void imageBufferReady(FrameBuffer *buffer);
+ void paramBufferReady(FrameBuffer *buffer);
+ void statBufferReady(FrameBuffer *buffer);
void dewarpBufferReady(FrameBuffer *buffer);
void frameStart(uint32_t sequence);
@@ -367,7 +367,7 @@ int RkISP1CameraData::loadIPA(unsigned int hwRevision)
return -ENOENT;
ipa_->setSensorControls.connect(this, &RkISP1CameraData::setSensorControls);
- ipa_->paramsBufferReady.connect(this, &RkISP1CameraData::paramFilled);
+ ipa_->paramsComputed.connect(this, &RkISP1CameraData::paramsComputed);
ipa_->metadataReady.connect(this, &RkISP1CameraData::metadataReady);
/*
@@ -400,7 +400,7 @@ int RkISP1CameraData::loadIPA(unsigned int hwRevision)
return 0;
}
-void RkISP1CameraData::paramFilled(unsigned int frame, unsigned int bytesused)
+void RkISP1CameraData::paramsComputed(unsigned int frame, unsigned int bytesused)
{
PipelineHandlerRkISP1 *pipe = RkISP1CameraData::pipe();
RkISP1FrameInfo *info = frameInfo_.find(frame);
@@ -1120,8 +1120,8 @@ int PipelineHandlerRkISP1::queueRequestDevice(Camera *camera, Request *request)
if (data->selfPath_ && info->selfPathBuffer)
data->selfPath_->queueBuffer(info->selfPathBuffer);
} else {
- data->ipa_->fillParamsBuffer(data->frame_,
- info->paramBuffer->cookie());
+ data->ipa_->computeParams(data->frame_,
+ info->paramBuffer->cookie());
}
data->frame_++;
@@ -1334,11 +1334,11 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
if (hasSelfPath_ && !selfPath_.init(media_))
return false;
- mainPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::bufferReady);
+ mainPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::imageBufferReady);
if (hasSelfPath_)
- selfPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::bufferReady);
- stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
- param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);
+ selfPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::imageBufferReady);
+ stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statBufferReady);
+ param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramBufferReady);
/* If dewarper is present, create its instance. */
DeviceMatch dwp("dw100");
@@ -1399,7 +1399,7 @@ void PipelineHandlerRkISP1::tryCompleteRequest(RkISP1FrameInfo *info)
completeRequest(request);
}
-void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
+void PipelineHandlerRkISP1::imageBufferReady(FrameBuffer *buffer)
{
ASSERT(activeCamera_);
RkISP1CameraData *data = cameraData(activeCamera_);
@@ -1424,7 +1424,7 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
if (isRaw_) {
const ControlList &ctrls =
data->delayedCtrls_->get(metadata.sequence);
- data->ipa_->processStatsBuffer(info->frame, 0, ctrls);
+ data->ipa_->processStats(info->frame, 0, ctrls);
}
} else {
if (isRaw_)
@@ -1508,7 +1508,7 @@ void PipelineHandlerRkISP1::dewarpBufferReady(FrameBuffer *buffer)
tryCompleteRequest(info);
}
-void PipelineHandlerRkISP1::paramReady(FrameBuffer *buffer)
+void PipelineHandlerRkISP1::paramBufferReady(FrameBuffer *buffer)
{
ASSERT(activeCamera_);
RkISP1CameraData *data = cameraData(activeCamera_);
@@ -1521,7 +1521,7 @@ void PipelineHandlerRkISP1::paramReady(FrameBuffer *buffer)
tryCompleteRequest(info);
}
-void PipelineHandlerRkISP1::statReady(FrameBuffer *buffer)
+void PipelineHandlerRkISP1::statBufferReady(FrameBuffer *buffer)
{
ASSERT(activeCamera_);
RkISP1CameraData *data = cameraData(activeCamera_);
@@ -1539,8 +1539,8 @@ void PipelineHandlerRkISP1::statReady(FrameBuffer *buffer)
if (data->frame_ <= buffer->metadata().sequence)
data->frame_ = buffer->metadata().sequence + 1;
- data->ipa_->processStatsBuffer(info->frame, info->statBuffer->cookie(),
- data->delayedCtrls_->get(buffer->metadata().sequence));
+ data->ipa_->processStats(info->frame, info->statBuffer->cookie(),
+ data->delayedCtrls_->get(buffer->metadata().sequence));
}
REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1, "rkisp1")
@@ -225,7 +225,7 @@ public:
int setupFormats(V4L2SubdeviceFormat *format,
V4L2Subdevice::Whence whence,
Transform transform = Transform::Identity);
- void bufferReady(FrameBuffer *buffer);
+ void imageBufferReady(FrameBuffer *buffer);
unsigned int streamIndex(const Stream *stream) const
{
@@ -784,7 +784,7 @@ int SimpleCameraData::setupFormats(V4L2SubdeviceFormat *format,
return 0;
}
-void SimpleCameraData::bufferReady(FrameBuffer *buffer)
+void SimpleCameraData::imageBufferReady(FrameBuffer *buffer)
{
SimplePipelineHandler *pipe = SimpleCameraData::pipe();
@@ -1360,7 +1360,7 @@ int SimplePipelineHandler::start(Camera *camera, [[maybe_unused]] const ControlL
return ret;
}
- video->bufferReady.connect(data, &SimpleCameraData::bufferReady);
+ video->bufferReady.connect(data, &SimpleCameraData::imageBufferReady);
ret = video->streamOn();
if (ret < 0) {
@@ -1404,7 +1404,7 @@ void SimplePipelineHandler::stopDevice(Camera *camera)
video->streamOff();
video->releaseBuffers();
- video->bufferReady.disconnect(data, &SimpleCameraData::bufferReady);
+ video->bufferReady.disconnect(data, &SimpleCameraData::imageBufferReady);
data->conversionBuffers_.clear();
@@ -47,7 +47,7 @@ public:
int init(MediaDevice *media);
void addControl(uint32_t cid, const ControlInfo &v4l2info,
ControlInfoMap::Map *ctrls);
- void bufferReady(FrameBuffer *buffer);
+ void imageBufferReady(FrameBuffer *buffer);
const std::string &id() const { return id_; }
@@ -476,7 +476,7 @@ int UVCCameraData::init(MediaDevice *media)
if (ret)
return ret;
- video_->bufferReady.connect(this, &UVCCameraData::bufferReady);
+ video_->bufferReady.connect(this, &UVCCameraData::imageBufferReady);
/* Generate the camera ID. */
if (!generateId()) {
@@ -747,7 +747,7 @@ void UVCCameraData::addControl(uint32_t cid, const ControlInfo &v4l2Info,
ctrls->emplace(id, info);
}
-void UVCCameraData::bufferReady(FrameBuffer *buffer)
+void UVCCameraData::imageBufferReady(FrameBuffer *buffer)
{
Request *request = buffer->request();
@@ -56,8 +56,8 @@ public:
int init();
int allocateMockIPABuffers();
- void bufferReady(FrameBuffer *buffer);
- void paramsBufferReady(unsigned int id, const Flags<ipa::vimc::TestFlag> flags);
+ void imageBufferReady(FrameBuffer *buffer);
+ void paramsComputed(unsigned int id, const Flags<ipa::vimc::TestFlag> flags);
MediaDevice *media_;
std::unique_ptr<CameraSensor> sensor_;
@@ -492,7 +492,7 @@ bool PipelineHandlerVimc::match(DeviceEnumerator *enumerator)
return false;
}
- data->ipa_->paramsBufferReady.connect(data.get(), &VimcCameraData::paramsBufferReady);
+ data->ipa_->paramsComputed.connect(data.get(), &VimcCameraData::paramsComputed);
std::string conf = data->ipa_->configurationFile("vimc.conf");
Flags<ipa::vimc::TestFlag> inFlags = ipa::vimc::TestFlag::Flag2;
@@ -549,7 +549,7 @@ int VimcCameraData::init()
if (video_->open())
return -ENODEV;
- video_->bufferReady.connect(this, &VimcCameraData::bufferReady);
+ video_->bufferReady.connect(this, &VimcCameraData::imageBufferReady);
raw_ = V4L2VideoDevice::fromEntityName(media_, "Raw Capture 1");
if (raw_->open())
@@ -597,7 +597,7 @@ int VimcCameraData::init()
return 0;
}
-void VimcCameraData::bufferReady(FrameBuffer *buffer)
+void VimcCameraData::imageBufferReady(FrameBuffer *buffer)
{
PipelineHandlerVimc *pipe =
static_cast<PipelineHandlerVimc *>(this->pipe());
@@ -622,7 +622,7 @@ void VimcCameraData::bufferReady(FrameBuffer *buffer)
pipe->completeBuffer(request, buffer);
pipe->completeRequest(request);
- ipa_->fillParamsBuffer(request->sequence(), mockIPABufs_[0]->cookie());
+ ipa_->computeParams(request->sequence(), mockIPABufs_[0]->cookie());
}
int VimcCameraData::allocateMockIPABuffers()
@@ -640,8 +640,8 @@ int VimcCameraData::allocateMockIPABuffers()
return video_->exportBuffers(kBufCount, &mockIPABufs_);
}
-void VimcCameraData::paramsBufferReady([[maybe_unused]] unsigned int id,
- [[maybe_unused]] const Flags<ipa::vimc::TestFlag> flags)
+void VimcCameraData::paramsComputed([[maybe_unused]] unsigned int id,
+ [[maybe_unused]] const Flags<ipa::vimc::TestFlag> flags)
{
}
@@ -351,7 +351,7 @@ void SoftwareIsp::stop()
*/
void SoftwareIsp::process(uint32_t frame, FrameBuffer *input, FrameBuffer *output)
{
- ipa_->fillParamsBuffer(frame);
+ ipa_->computeParams(frame);
debayer_->invokeMethod(&DebayerCpu::process,
ConnectionTypeQueued, frame, input, output, debayerParams_);
}