libcamera: Rationalize IPA and handler names

The names used by the IPA interface and the names used for the buffer
completion handlers in libcamera clash in their use of the term
"buffer".

For example, the video device buffer completion handler is called
"bufferReady", while the IPA interface function that asks the IPA to
compute parameters is called "fillParamsBuffer". This makes it hard to
recognize which functions handle video device completion signals and
which ones handle IPA interface events.

Rationalize the naming scheme of the IPA interface functions and events
and of the signal handlers in the pipeline handlers, according to the
following table. Remove the name "buffer" from the IPA interface events
and their handlers, reserving it for the buffer completion handlers, and
rename the IPA interface functions and events to use the 'params' and
'stats' names instead.

IPA Interface:

- fillParamsBuffer -> computeParams   [FUNCTION]
- processStatsBuffer -> processStats  [FUNCTION]
- paramsBufferReady -> paramsComputed [EVENT]

Pipeline handler:

- bufferReady -> imageBufferReady     [BUFFER HANDLER]
- paramReady -> paramBufferReady      [BUFFER HANDLER]
- statReady -> statBufferReady        [BUFFER HANDLER]
- paramFilled -> paramsComputed       [IPA EVENT HANDLER]

Cosmetic change only, no functional changes intended.
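
To illustrate the new scheme, here is a minimal sketch of how a pipeline
handler wires the renamed signals and calls the renamed IPA functions.
It is condensed from the RkISP1 hunks below; the surrounding code and
the local variable names are simplified and are not part of this patch:

    /* IPA interface event -> IPA event handler */
    ipa_->paramsComputed.connect(this, &RkISP1CameraData::paramsComputed);

    /* Video device buffer completion signals -> buffer completion handlers */
    mainPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::imageBufferReady);
    stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statBufferReady);
    param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramBufferReady);

    /* IPA interface functions called by the pipeline handler */
    ipa_->computeParams(frame, paramBuffer->cookie());
    ipa_->processStats(frame, statBuffer->cookie(), sensorControls);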

Signed-off-by: Jacopo Mondi <jacopo.mondi@ideasonboard.com>
Reviewed-by: Milan Zamazal <mzamazal@redhat.com>
Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
Jacopo Mondi 2024-10-28 09:52:47 +01:00
parent 2cbf863f3f
commit 8fceb6ab1d
16 changed files with 86 additions and 86 deletions

@@ -31,14 +31,14 @@ interface IPAIPU3Interface {
         unmapBuffers(array<uint32> ids);
 
         [async] queueRequest(uint32 frame, libcamera.ControlList controls);
-        [async] fillParamsBuffer(uint32 frame, uint32 bufferId);
-        [async] processStatsBuffer(uint32 frame, int64 frameTimestamp,
+        [async] computeParams(uint32 frame, uint32 bufferId);
+        [async] processStats(uint32 frame, int64 frameTimestamp,
                         uint32 bufferId, libcamera.ControlList sensorControls);
 };
 
 interface IPAIPU3EventInterface {
         setSensorControls(uint32 frame, libcamera.ControlList sensorControls,
                         libcamera.ControlList lensControls);
-        paramsBufferReady(uint32 frame);
+        paramsComputed(uint32 frame);
         metadataReady(uint32 frame, libcamera.ControlList metadata);
 };

@@ -31,13 +31,13 @@ interface IPARkISP1Interface {
         unmapBuffers(array<uint32> ids);
 
         [async] queueRequest(uint32 frame, libcamera.ControlList reqControls);
-        [async] fillParamsBuffer(uint32 frame, uint32 bufferId);
-        [async] processStatsBuffer(uint32 frame, uint32 bufferId,
+        [async] computeParams(uint32 frame, uint32 bufferId);
+        [async] processStats(uint32 frame, uint32 bufferId,
                         libcamera.ControlList sensorControls);
 };
 
 interface IPARkISP1EventInterface {
-        paramsBufferReady(uint32 frame, uint32 bytesused);
+        paramsComputed(uint32 frame, uint32 bytesused);
         setSensorControls(uint32 frame, libcamera.ControlList sensorControls);
         metadataReady(uint32 frame, libcamera.ControlList metadata);
 };

@@ -24,7 +24,7 @@ interface IPASoftInterface {
                 => (int32 ret);
 
         [async] queueRequest(uint32 frame, libcamera.ControlList sensorControls);
-        [async] fillParamsBuffer(uint32 frame);
+        [async] computeParams(uint32 frame);
         [async] processStats(uint32 frame,
                         uint32 bufferId,
                         libcamera.ControlList sensorControls);

@@ -47,9 +47,9 @@ interface IPAVimcInterface {
          * interface functions that mimick how other pipeline handlers typically
          * handle parameters at runtime.
          */
-        [async] fillParamsBuffer(uint32 frame, uint32 bufferId);
+        [async] computeParams(uint32 frame, uint32 bufferId);
 };
 
 interface IPAVimcEventInterface {
-        paramsBufferReady(uint32 bufferId, [flags] TestFlag flags);
+        paramsComputed(uint32 bufferId, [flags] TestFlag flags);
 };

@@ -27,8 +27,8 @@ from applications, and managing events from the pipeline handler.
 └─┬───┬───┬──────┬────┬────┬────┬─┴────▼─┬──┘ 1: init()
   │ │ │ │ ▲ │ ▲ │ ▲ │ ▲ │                     2: configure()
   │1 │2 │3 │4│ │4│ │4│ │4│ │5                 3: mapBuffers(), start()
-  │ │ │ │ │ │ │ │ │ │ │ │                     4: (▼) queueRequest(), fillParamsBuffer(), processStatsBuffer()
-  ▼ ▼ ▼ ▼ │ ▼ │ ▼ │ ▼ │ ▼                     (▲) setSensorControls, paramsBufferReady, metadataReady Signals
+  │ │ │ │ │ │ │ │ │ │ │ │                     4: (▼) queueRequest(), computeParams(), processStats()
+  ▼ ▼ ▼ ▼ │ ▼ │ ▼ │ ▼ │ ▼                     (▲) setSensorControls, paramsComputed, metadataReady Signals
 ┌──────────────────┴────┴────┴────┴─────────┐ 5: stop(), unmapBuffers()
 │                  IPU3 IPA                 │
 │ ┌───────────────────────┐                 │
@@ -104,8 +104,8 @@ to operate when running:
 - configure()
 - queueRequest()
-- fillParamsBuffer()
-- processStatsBuffer()
+- computeParams()
+- processStats()
 
 The configuration phase allows the pipeline-handler to inform the IPA of
 the current stream configurations, which is then passed into each
@@ -119,7 +119,7 @@ When configured, the IPA is notified by the pipeline handler of the
 Camera ``start()`` event, after which incoming requests will be queued
 for processing, requiring a parameter buffer (``ipu3_uapi_params``) to
 be populated for the ImgU. This is given to the IPA through
-``fillParamsBuffer()``, and then passed directly to each algorithm
+``computeParams()``, and then passed directly to each algorithm
 through the ``prepare()`` call allowing the ISP configuration to be
 updated for the needs of each component that the algorithm is
 responsible for.
@@ -129,7 +129,7 @@ structure that it modifies, and it should take care to ensure that any
 structure set by a use flag is fully initialised to suitable values.
 
 The parameter buffer is returned to the pipeline handler through the
-``paramsBufferReady`` signal, and from there queued to the ImgU along
+``paramsComputed`` signal, and from there queued to the ImgU along
 with a raw frame captured with the CIO2.
 
 Post-frame completion
@@ -138,7 +138,7 @@ Post-frame completion
 When the capture of an image is completed, and successfully processed
 through the ImgU, the generated statistics buffer
 (``ipu3_uapi_stats_3a``) is given to the IPA through
-``processStatsBuffer()``. This provides the IPA with an opportunity to
+``processStats()``. This provides the IPA with an opportunity to
 examine the results of the ISP and run the calculations required by each
 algorithm on the new data. The algorithms may require context from the
 operations of other algorithms, for example, the AWB might choose to use
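
A condensed sketch of the per-frame flow described above, using the
renamed entry points (only the IPA function and signal names come from
this patch; the pipeline handler variables are simplified for
illustration):

    /* At IPA load time, the paramsComputed event is connected to the
     * handler that queues the filled parameter buffer, together with the
     * raw frame, to the ImgU. */
    ipa_->paramsComputed.connect(this, &IPU3CameraData::paramsComputed);

    /* Per frame: a raw CIO2 frame has completed, so ask the IPA to fill
     * the ImgU parameter buffer. */
    ipa_->computeParams(frame, paramBuffer->cookie());

    /* Per frame: the ImgU statistics buffer has completed, so hand it
     * back to the IPA and let the algorithms update their state for the
     * next frame. */
    ipa_->processStats(frame, sensorTimestamp, statBuffer->cookie(), sensorControls);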

@@ -87,14 +87,14 @@ namespace ipa::ipu3 {
  * parameter buffer, and adapting the settings of the sensor attached to the
  * IPU3 CIO2 through sensor-specific V4L2 controls.
  *
- * In fillParamsBuffer(), we populate the ImgU parameter buffer with
+ * In computeParams(), we populate the ImgU parameter buffer with
  * settings to configure the device in preparation for handling the frame
  * queued in the Request.
  *
 * When the frame has completed processing, the ImgU will generate a statistics
- * buffer which is given to the IPA with processStatsBuffer(). In this we run the
+ * buffer which is given to the IPA with processStats(). In this we run the
  * algorithms to parse the statistics and cache any results for the next
- * fillParamsBuffer() call.
+ * computeParams() call.
  *
  * The individual algorithms are split into modular components that are called
  * iteratively to allow them to process statistics from the ImgU in the order
@@ -155,10 +155,10 @@ public:
         void unmapBuffers(const std::vector<unsigned int> &ids) override;
 
         void queueRequest(const uint32_t frame, const ControlList &controls) override;
-        void fillParamsBuffer(const uint32_t frame, const uint32_t bufferId) override;
-        void processStatsBuffer(const uint32_t frame, const int64_t frameTimestamp,
+        void computeParams(const uint32_t frame, const uint32_t bufferId) override;
+        void processStats(const uint32_t frame, const int64_t frameTimestamp,
                         const uint32_t bufferId,
                         const ControlList &sensorControls) override;
 
 protected:
         std::string logPrefix() const override;
@@ -538,7 +538,7 @@ void IPAIPU3::unmapBuffers(const std::vector<unsigned int> &ids)
  * Algorithms are expected to fill the IPU3 parameter buffer for the next
  * frame given their most recent processing of the ImgU statistics.
  */
-void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
+void IPAIPU3::computeParams(const uint32_t frame, const uint32_t bufferId)
 {
         auto it = buffers_.find(bufferId);
         if (it == buffers_.end()) {
@@ -566,7 +566,7 @@ void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
         for (auto const &algo : algorithms())
                 algo->prepare(context_, frame, frameContext, params);
 
-        paramsBufferReady.emit(frame);
+        paramsComputed.emit(frame);
 }
 
 /**
@@ -580,9 +580,9 @@ void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
  * statistics are passed to each algorithm module to run their calculations and
  * update their state accordingly.
  */
-void IPAIPU3::processStatsBuffer(const uint32_t frame,
+void IPAIPU3::processStats(const uint32_t frame,
                 [[maybe_unused]] const int64_t frameTimestamp,
                 const uint32_t bufferId, const ControlList &sensorControls)
 {
         auto it = buffers_.find(bufferId);
         if (it == buffers_.end()) {

@@ -65,9 +65,9 @@ public:
         void unmapBuffers(const std::vector<unsigned int> &ids) override;
 
         void queueRequest(const uint32_t frame, const ControlList &controls) override;
-        void fillParamsBuffer(const uint32_t frame, const uint32_t bufferId) override;
-        void processStatsBuffer(const uint32_t frame, const uint32_t bufferId,
+        void computeParams(const uint32_t frame, const uint32_t bufferId) override;
+        void processStats(const uint32_t frame, const uint32_t bufferId,
                         const ControlList &sensorControls) override;
 
 protected:
         std::string logPrefix() const override;
@@ -335,7 +335,7 @@ void IPARkISP1::queueRequest(const uint32_t frame, const ControlList &controls)
         }
 }
 
-void IPARkISP1::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
+void IPARkISP1::computeParams(const uint32_t frame, const uint32_t bufferId)
 {
         IPAFrameContext &frameContext = context_.frameContexts.get(frame);
@@ -345,11 +345,11 @@ void IPARkISP1::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
         for (auto const &algo : algorithms())
                 algo->prepare(context_, frame, frameContext, &params);
 
-        paramsBufferReady.emit(frame, params.size());
+        paramsComputed.emit(frame, params.size());
 }
 
-void IPARkISP1::processStatsBuffer(const uint32_t frame, const uint32_t bufferId,
+void IPARkISP1::processStats(const uint32_t frame, const uint32_t bufferId,
                         const ControlList &sensorControls)
 {
         IPAFrameContext &frameContext = context_.frameContexts.get(frame);

@@ -57,7 +57,7 @@ public:
         void stop() override;
 
         void queueRequest(const uint32_t frame, const ControlList &controls) override;
-        void fillParamsBuffer(const uint32_t frame) override;
+        void computeParams(const uint32_t frame) override;
         void processStats(const uint32_t frame, const uint32_t bufferId,
                         const ControlList &sensorControls) override;
@@ -272,7 +272,7 @@ void IPASoftSimple::queueRequest(const uint32_t frame, const ControlList &contro
                 algo->queueRequest(context_, frame, frameContext, controls);
 }
 
-void IPASoftSimple::fillParamsBuffer(const uint32_t frame)
+void IPASoftSimple::computeParams(const uint32_t frame)
 {
         IPAFrameContext &frameContext = context_.frameContexts.get(frame);
         for (auto const &algo : algorithms())

@@ -48,7 +48,7 @@ public:
         void unmapBuffers(const std::vector<unsigned int> &ids) override;
 
         void queueRequest(uint32_t frame, const ControlList &controls) override;
-        void fillParamsBuffer(uint32_t frame, uint32_t bufferId) override;
+        void computeParams(uint32_t frame, uint32_t bufferId) override;
 
 private:
         void initTrace();
@@ -150,7 +150,7 @@ void IPAVimc::queueRequest([[maybe_unused]] uint32_t frame,
 {
 }
 
-void IPAVimc::fillParamsBuffer([[maybe_unused]] uint32_t frame, uint32_t bufferId)
+void IPAVimc::computeParams([[maybe_unused]] uint32_t frame, uint32_t bufferId)
 {
         auto it = buffers_.find(bufferId);
         if (it == buffers_.end()) {
@@ -159,7 +159,7 @@ void IPAVimc::fillParamsBuffer([[maybe_unused]] uint32_t frame, uint32_t bufferI
         }
 
         Flags<ipa::vimc::TestFlag> flags;
-        paramsBufferReady.emit(bufferId, flags);
+        paramsComputed.emit(bufferId, flags);
 }
 
 void IPAVimc::initTrace()

@@ -88,7 +88,7 @@ public:
 private:
         void metadataReady(unsigned int id, const ControlList &metadata);
-        void paramsBufferReady(unsigned int id);
+        void paramsComputed(unsigned int id);
         void setSensorControls(unsigned int id, const ControlList &sensorControls,
                         const ControlList &lensControls);
 };
@@ -1156,7 +1156,7 @@ int IPU3CameraData::loadIPA()
                 return -ENOENT;
 
         ipa_->setSensorControls.connect(this, &IPU3CameraData::setSensorControls);
-        ipa_->paramsBufferReady.connect(this, &IPU3CameraData::paramsBufferReady);
+        ipa_->paramsComputed.connect(this, &IPU3CameraData::paramsComputed);
         ipa_->metadataReady.connect(this, &IPU3CameraData::metadataReady);
 
         /*
@@ -1217,7 +1217,7 @@ void IPU3CameraData::setSensorControls([[maybe_unused]] unsigned int id,
                 focusLens->setFocusPosition(focusValue.get<int32_t>());
 }
 
-void IPU3CameraData::paramsBufferReady(unsigned int id)
+void IPU3CameraData::paramsComputed(unsigned int id)
 {
         IPU3Frames::Info *info = frameInfos_.find(id);
         if (!info)
@@ -1328,7 +1328,7 @@ void IPU3CameraData::cio2BufferReady(FrameBuffer *buffer)
         if (request->findBuffer(&rawStream_))
                 pipe()->completeBuffer(request, buffer);
 
-        ipa_->fillParamsBuffer(info->id, info->paramBuffer->cookie());
+        ipa_->computeParams(info->id, info->paramBuffer->cookie());
 }
 
 void IPU3CameraData::paramBufferReady(FrameBuffer *buffer)
@@ -1372,8 +1372,8 @@ void IPU3CameraData::statBufferReady(FrameBuffer *buffer)
                 return;
         }
 
-        ipa_->processStatsBuffer(info->id, request->metadata().get(controls::SensorTimestamp).value_or(0),
+        ipa_->processStats(info->id, request->metadata().get(controls::SensorTimestamp).value_or(0),
                         info->statBuffer->cookie(), info->effectiveSensorControls);
 }
 
 /*

@@ -463,7 +463,7 @@ public:
         int queueRequestDevice(Camera *camera, Request *request) override;
 
-        void bufferReady(FrameBuffer *buffer);
+        void imageBufferReady(FrameBuffer *buffer);
 
         bool match(DeviceEnumerator *enumerator) override;
@@ -877,7 +877,7 @@ int PipelineHandlerMaliC55::queueRequestDevice(Camera *camera, Request *request)
         return 0;
 }
 
-void PipelineHandlerMaliC55::bufferReady(FrameBuffer *buffer)
+void PipelineHandlerMaliC55::imageBufferReady(FrameBuffer *buffer)
 {
         Request *request = buffer->request();
@@ -991,7 +991,7 @@ bool PipelineHandlerMaliC55::match(DeviceEnumerator *enumerator)
         if (frPipe->cap->open() < 0)
                 return false;
 
-        frPipe->cap->bufferReady.connect(this, &PipelineHandlerMaliC55::bufferReady);
+        frPipe->cap->bufferReady.connect(this, &PipelineHandlerMaliC55::imageBufferReady);
 
         dsFitted_ = !!media_->getEntityByName("mali-c55 ds");
         if (dsFitted_) {
@@ -1007,7 +1007,7 @@ bool PipelineHandlerMaliC55::match(DeviceEnumerator *enumerator)
                 if (dsPipe->cap->open() < 0)
                         return false;
 
-                dsPipe->cap->bufferReady.connect(this, &PipelineHandlerMaliC55::bufferReady);
+                dsPipe->cap->bufferReady.connect(this, &PipelineHandlerMaliC55::imageBufferReady);
         }
 
         ispSink = isp_->entity()->getPadByIndex(0);

@@ -114,7 +114,7 @@ public:
         ControlInfoMap ipaControls_;
 
 private:
-        void paramFilled(unsigned int frame, unsigned int bytesused);
+        void paramsComputed(unsigned int frame, unsigned int bytesused);
         void setSensorControls(unsigned int frame,
                         const ControlList &sensorControls);
@@ -180,9 +180,9 @@ private:
                         const RkISP1CameraConfiguration &config);
         int createCamera(MediaEntity *sensor);
         void tryCompleteRequest(RkISP1FrameInfo *info);
-        void bufferReady(FrameBuffer *buffer);
-        void paramReady(FrameBuffer *buffer);
-        void statReady(FrameBuffer *buffer);
+        void imageBufferReady(FrameBuffer *buffer);
+        void paramBufferReady(FrameBuffer *buffer);
+        void statBufferReady(FrameBuffer *buffer);
         void dewarpBufferReady(FrameBuffer *buffer);
         void frameStart(uint32_t sequence);
@@ -367,7 +367,7 @@ int RkISP1CameraData::loadIPA(unsigned int hwRevision)
                 return -ENOENT;
 
         ipa_->setSensorControls.connect(this, &RkISP1CameraData::setSensorControls);
-        ipa_->paramsBufferReady.connect(this, &RkISP1CameraData::paramFilled);
+        ipa_->paramsComputed.connect(this, &RkISP1CameraData::paramsComputed);
         ipa_->metadataReady.connect(this, &RkISP1CameraData::metadataReady);
 
         /*
@@ -400,7 +400,7 @@ int RkISP1CameraData::loadIPA(unsigned int hwRevision)
         return 0;
 }
 
-void RkISP1CameraData::paramFilled(unsigned int frame, unsigned int bytesused)
+void RkISP1CameraData::paramsComputed(unsigned int frame, unsigned int bytesused)
 {
         PipelineHandlerRkISP1 *pipe = RkISP1CameraData::pipe();
         RkISP1FrameInfo *info = frameInfo_.find(frame);
@@ -1120,8 +1120,8 @@ int PipelineHandlerRkISP1::queueRequestDevice(Camera *camera, Request *request)
                 if (data->selfPath_ && info->selfPathBuffer)
                         data->selfPath_->queueBuffer(info->selfPathBuffer);
         } else {
-                data->ipa_->fillParamsBuffer(data->frame_,
+                data->ipa_->computeParams(data->frame_,
                                 info->paramBuffer->cookie());
         }
 
         data->frame_++;
@@ -1333,11 +1333,11 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
         if (hasSelfPath_ && !selfPath_.init(media_))
                 return false;
 
-        mainPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::bufferReady);
+        mainPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::imageBufferReady);
         if (hasSelfPath_)
-                selfPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::bufferReady);
-        stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
-        param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);
+                selfPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::imageBufferReady);
+        stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statBufferReady);
+        param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramBufferReady);
 
         /* If dewarper is present, create its instance. */
         DeviceMatch dwp("dw100");
@@ -1398,7 +1398,7 @@ void PipelineHandlerRkISP1::tryCompleteRequest(RkISP1FrameInfo *info)
         completeRequest(request);
 }
 
-void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
+void PipelineHandlerRkISP1::imageBufferReady(FrameBuffer *buffer)
 {
         ASSERT(activeCamera_);
         RkISP1CameraData *data = cameraData(activeCamera_);
@@ -1423,7 +1423,7 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
                 if (isRaw_) {
                         const ControlList &ctrls =
                                 data->delayedCtrls_->get(metadata.sequence);
-                        data->ipa_->processStatsBuffer(info->frame, 0, ctrls);
+                        data->ipa_->processStats(info->frame, 0, ctrls);
                 }
         } else {
                 if (isRaw_)
@@ -1507,7 +1507,7 @@ void PipelineHandlerRkISP1::dewarpBufferReady(FrameBuffer *buffer)
         tryCompleteRequest(info);
 }
 
-void PipelineHandlerRkISP1::paramReady(FrameBuffer *buffer)
+void PipelineHandlerRkISP1::paramBufferReady(FrameBuffer *buffer)
 {
         ASSERT(activeCamera_);
         RkISP1CameraData *data = cameraData(activeCamera_);
@@ -1520,7 +1520,7 @@ void PipelineHandlerRkISP1::paramReady(FrameBuffer *buffer)
         tryCompleteRequest(info);
 }
 
-void PipelineHandlerRkISP1::statReady(FrameBuffer *buffer)
+void PipelineHandlerRkISP1::statBufferReady(FrameBuffer *buffer)
 {
         ASSERT(activeCamera_);
         RkISP1CameraData *data = cameraData(activeCamera_);
@@ -1538,8 +1538,8 @@ void PipelineHandlerRkISP1::statReady(FrameBuffer *buffer)
         if (data->frame_ <= buffer->metadata().sequence)
                 data->frame_ = buffer->metadata().sequence + 1;
 
-        data->ipa_->processStatsBuffer(info->frame, info->statBuffer->cookie(),
+        data->ipa_->processStats(info->frame, info->statBuffer->cookie(),
                         data->delayedCtrls_->get(buffer->metadata().sequence));
 }
 
 REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1, "rkisp1")

@@ -225,7 +225,7 @@ public:
         int setupFormats(V4L2SubdeviceFormat *format,
                         V4L2Subdevice::Whence whence,
                         Transform transform = Transform::Identity);
-        void bufferReady(FrameBuffer *buffer);
+        void imageBufferReady(FrameBuffer *buffer);
         void clearIncompleteRequests();
 
         unsigned int streamIndex(const Stream *stream) const
@@ -784,7 +784,7 @@ int SimpleCameraData::setupFormats(V4L2SubdeviceFormat *format,
         return 0;
 }
 
-void SimpleCameraData::bufferReady(FrameBuffer *buffer)
+void SimpleCameraData::imageBufferReady(FrameBuffer *buffer)
 {
         SimplePipelineHandler *pipe = SimpleCameraData::pipe();
@@ -1364,7 +1364,7 @@ int SimplePipelineHandler::start(Camera *camera, [[maybe_unused]] const ControlL
                 return ret;
         }
 
-        video->bufferReady.connect(data, &SimpleCameraData::bufferReady);
+        video->bufferReady.connect(data, &SimpleCameraData::imageBufferReady);
 
         ret = video->streamOn();
         if (ret < 0) {
@@ -1408,7 +1408,7 @@ void SimplePipelineHandler::stopDevice(Camera *camera)
         video->streamOff();
         video->releaseBuffers();
 
-        video->bufferReady.disconnect(data, &SimpleCameraData::bufferReady);
+        video->bufferReady.disconnect(data, &SimpleCameraData::imageBufferReady);
 
         data->clearIncompleteRequests();
         data->conversionBuffers_.clear();

@@ -47,7 +47,7 @@ public:
         int init(MediaDevice *media);
         void addControl(uint32_t cid, const ControlInfo &v4l2info,
                         ControlInfoMap::Map *ctrls);
-        void bufferReady(FrameBuffer *buffer);
+        void imageBufferReady(FrameBuffer *buffer);
 
         const std::string &id() const { return id_; }
@@ -476,7 +476,7 @@ int UVCCameraData::init(MediaDevice *media)
         if (ret)
                 return ret;
 
-        video_->bufferReady.connect(this, &UVCCameraData::bufferReady);
+        video_->bufferReady.connect(this, &UVCCameraData::imageBufferReady);
 
         /* Generate the camera ID. */
         if (!generateId()) {
@@ -747,7 +747,7 @@ void UVCCameraData::addControl(uint32_t cid, const ControlInfo &v4l2Info,
         ctrls->emplace(id, info);
 }
 
-void UVCCameraData::bufferReady(FrameBuffer *buffer)
+void UVCCameraData::imageBufferReady(FrameBuffer *buffer)
 {
         Request *request = buffer->request();

@@ -56,8 +56,8 @@ public:
         int init();
         int allocateMockIPABuffers();
-        void bufferReady(FrameBuffer *buffer);
-        void paramsBufferReady(unsigned int id, const Flags<ipa::vimc::TestFlag> flags);
+        void imageBufferReady(FrameBuffer *buffer);
+        void paramsComputed(unsigned int id, const Flags<ipa::vimc::TestFlag> flags);
 
         MediaDevice *media_;
         std::unique_ptr<CameraSensor> sensor_;
@@ -492,7 +492,7 @@ bool PipelineHandlerVimc::match(DeviceEnumerator *enumerator)
                 return false;
         }
 
-        data->ipa_->paramsBufferReady.connect(data.get(), &VimcCameraData::paramsBufferReady);
+        data->ipa_->paramsComputed.connect(data.get(), &VimcCameraData::paramsComputed);
 
         std::string conf = data->ipa_->configurationFile("vimc.conf");
         Flags<ipa::vimc::TestFlag> inFlags = ipa::vimc::TestFlag::Flag2;
@@ -548,7 +548,7 @@ int VimcCameraData::init()
         if (video_->open())
                 return -ENODEV;
 
-        video_->bufferReady.connect(this, &VimcCameraData::bufferReady);
+        video_->bufferReady.connect(this, &VimcCameraData::imageBufferReady);
 
         raw_ = V4L2VideoDevice::fromEntityName(media_, "Raw Capture 1");
         if (raw_->open())
@@ -596,7 +596,7 @@ int VimcCameraData::init()
         return 0;
 }
 
-void VimcCameraData::bufferReady(FrameBuffer *buffer)
+void VimcCameraData::imageBufferReady(FrameBuffer *buffer)
 {
         PipelineHandlerVimc *pipe =
                 static_cast<PipelineHandlerVimc *>(this->pipe());
@@ -621,7 +621,7 @@ void VimcCameraData::bufferReady(FrameBuffer *buffer)
         pipe->completeBuffer(request, buffer);
         pipe->completeRequest(request);
 
-        ipa_->fillParamsBuffer(request->sequence(), mockIPABufs_[0]->cookie());
+        ipa_->computeParams(request->sequence(), mockIPABufs_[0]->cookie());
 }
 
 int VimcCameraData::allocateMockIPABuffers()
@@ -639,8 +639,8 @@ int VimcCameraData::allocateMockIPABuffers()
         return video_->exportBuffers(kBufCount, &mockIPABufs_);
 }
 
-void VimcCameraData::paramsBufferReady([[maybe_unused]] unsigned int id,
+void VimcCameraData::paramsComputed([[maybe_unused]] unsigned int id,
                         [[maybe_unused]] const Flags<ipa::vimc::TestFlag> flags)
 {
 }

@@ -351,7 +351,7 @@ void SoftwareIsp::stop()
  */
 void SoftwareIsp::process(uint32_t frame, FrameBuffer *input, FrameBuffer *output)
 {
-        ipa_->fillParamsBuffer(frame);
+        ipa_->computeParams(frame);
         debayer_->invokeMethod(&DebayerCpu::process,
                         ConnectionTypeQueued, frame, input, output, debayerParams_);
 }