ipa: ipu3: Rework IPAFrameContext
Currently, IPAFrameContext consolidates the values computed by the
active state of the algorithms, along with the values applied on the
sensor. Moving ahead, we want to have a frame context associated with
each incoming request (or frame to be captured). This shouldn't
necessarily be tied to the "active state" of the algorithms, hence:

- Rename the current IPAFrameContext to IPAActiveState. It now reflects
  the latest active state of the algorithms and has nothing to do with
  any frame-related operations or values.

- Re-instate IPAFrameContext with a sub-structure 'sensor', currently
  storing the exposure and gain values.

Adapt the various accesses to the frame context to the new changes
described above.

Subsequently, the re-instated IPAFrameContext will be extended to
contain a frame number and a ControlList to remember the incoming
request controls provided by the application. A ring buffer will be
introduced to store these frame contexts for a certain number of
frames.

Signed-off-by: Umang Jain <umang.jain@ideasonboard.com>
Reviewed-by: Jacopo Mondi <jacopo@jmondi.org>
Reviewed-by: Jean-Michel Hautbois <jeanmichel.hautbois@ideasonboard.com>
Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
parent e0766fa205
commit bab437df1f
8 changed files with 113 additions and 95 deletions
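To make the new layering concrete, here is a minimal, hypothetical sketch of the split this commit introduces, plus the ring buffer described above as future work. The kFrameContextDepth constant and the frameContexts array are assumptions for illustration only; this commit still stores a single frameContext inside IPAContext.

	/* Illustrative sketch only; trimmed to the AGC and sensor fields. */
	#include <array>
	#include <cstddef>
	#include <cstdint>

	struct IPAActiveState {			/* latest values computed by the algorithms */
		struct {
			uint32_t exposure;	/* exposure computed by AGC, in lines */
			double gain;		/* analogue gain computed by AGC */
		} agc;
	};

	struct IPAFrameContext {		/* values tied to one captured frame */
		struct {
			uint32_t exposure;	/* exposure applied on the sensor */
			double gain;		/* analogue gain applied on the sensor */
		} sensor;
	};

	/* Future work sketched in the commit message: remember contexts for N frames. */
	static constexpr std::size_t kFrameContextDepth = 16; /* assumed depth */

	struct IPAContext {
		IPAActiveState activeState;
		std::array<IPAFrameContext, kFrameContextDepth> frameContexts; /* hypothetical ring buffer */
	};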
@@ -185,11 +185,11 @@ int Af::configure(IPAContext &context, const IPAConfigInfo &configInfo)
 	afIgnoreFrameReset();
 
 	/* Initial focus value */
-	context.frameContext.af.focus = 0;
+	context.activeState.af.focus = 0;
 	/* Maximum variance of the AF statistics */
-	context.frameContext.af.maxVariance = 0;
+	context.activeState.af.maxVariance = 0;
 	/* The stable AF value flag. if it is true, the AF should be in a stable state. */
-	context.frameContext.af.stable = false;
+	context.activeState.af.stable = false;
 
 	return 0;
 }
@@ -211,10 +211,10 @@ void Af::afCoarseScan(IPAContext &context)
 
 	if (afScan(context, kCoarseSearchStep)) {
 		coarseCompleted_ = true;
-		context.frameContext.af.maxVariance = 0;
-		focus_ = context.frameContext.af.focus -
-			 (context.frameContext.af.focus * kFineRange);
-		context.frameContext.af.focus = focus_;
+		context.activeState.af.maxVariance = 0;
+		focus_ = context.activeState.af.focus -
+			 (context.activeState.af.focus * kFineRange);
+		context.activeState.af.focus = focus_;
 		previousVariance_ = 0;
 		maxStep_ = std::clamp(focus_ + static_cast<uint32_t>((focus_ * kFineRange)),
				      0U, kMaxFocusSteps);
@@ -237,7 +237,7 @@ void Af::afFineScan(IPAContext &context)
 		return;
 
 	if (afScan(context, kFineSearchStep)) {
-		context.frameContext.af.stable = true;
+		context.activeState.af.stable = true;
 		fineCompleted_ = true;
 	}
 }
@@ -254,10 +254,10 @@ void Af::afReset(IPAContext &context)
 	if (afNeedIgnoreFrame())
 		return;
 
-	context.frameContext.af.maxVariance = 0;
-	context.frameContext.af.focus = 0;
+	context.activeState.af.maxVariance = 0;
+	context.activeState.af.focus = 0;
 	focus_ = 0;
-	context.frameContext.af.stable = false;
+	context.activeState.af.stable = false;
 	ignoreCounter_ = kIgnoreFrame;
 	previousVariance_ = 0.0;
 	coarseCompleted_ = false;
@@ -280,7 +280,7 @@ bool Af::afScan(IPAContext &context, int min_step)
 {
 	if (focus_ > maxStep_) {
 		/* If reach the max step, move lens to the position. */
-		context.frameContext.af.focus = bestFocus_;
+		context.activeState.af.focus = bestFocus_;
 		return true;
 	} else {
 		/*
@@ -288,8 +288,8 @@ bool Af::afScan(IPAContext &context, int min_step)
 		 * derivative. If the direction changes, it means we have
 		 * passed a maximum one step before.
 		 */
-		if ((currentVariance_ - context.frameContext.af.maxVariance) >=
-		    -(context.frameContext.af.maxVariance * 0.1)) {
+		if ((currentVariance_ - context.activeState.af.maxVariance) >=
+		    -(context.activeState.af.maxVariance * 0.1)) {
 			/*
 			 * Positive and zero derivative:
 			 * The variance is still increasing. The focus could be
@@ -298,8 +298,8 @@ bool Af::afScan(IPAContext &context, int min_step)
 			 */
 			bestFocus_ = focus_;
 			focus_ += min_step;
-			context.frameContext.af.focus = focus_;
-			context.frameContext.af.maxVariance = currentVariance_;
+			context.activeState.af.focus = focus_;
+			context.activeState.af.maxVariance = currentVariance_;
 		} else {
 			/*
 			 * Negative derivative:
@@ -307,7 +307,7 @@ bool Af::afScan(IPAContext &context, int min_step)
 			 * variance is found. Set focus step to previous good one
 			 * then return immediately.
 			 */
-			context.frameContext.af.focus = bestFocus_;
+			context.activeState.af.focus = bestFocus_;
 			return true;
 		}
 	}
@@ -389,13 +389,13 @@ double Af::afEstimateVariance(Span<const y_table_item_t> y_items, bool isY1)
 bool Af::afIsOutOfFocus(IPAContext context)
 {
 	const uint32_t diff_var = std::abs(currentVariance_ -
-					   context.frameContext.af.maxVariance);
-	const double var_ratio = diff_var / context.frameContext.af.maxVariance;
+					   context.activeState.af.maxVariance);
+	const double var_ratio = diff_var / context.activeState.af.maxVariance;
 
 	LOG(IPU3Af, Debug) << "Variance change rate: "
 			   << var_ratio
 			   << " Current VCM step: "
-			   << context.frameContext.af.focus;
+			   << context.activeState.af.focus;
 
 	if (var_ratio > kMaxChange)
 		return true;
@@ -437,7 +437,7 @@ void Af::process(IPAContext &context, const ipu3_uapi_stats_3a *stats)
 	 */
 	currentVariance_ = afEstimateVariance(y_items, !coarseCompleted_);
 
-	if (!context.frameContext.af.stable) {
+	if (!context.activeState.af.stable) {
 		afCoarseScan(context);
 		afFineScan(context);
 	} else {
@@ -87,7 +87,7 @@ int Agc::configure(IPAContext &context,
 		   [[maybe_unused]] const IPAConfigInfo &configInfo)
 {
 	const IPASessionConfiguration &configuration = context.configuration;
-	IPAFrameContext &frameContext = context.frameContext;
+	IPAActiveState &activeState = context.activeState;
 
 	stride_ = configuration.grid.stride;
 
@@ -99,8 +99,8 @@ int Agc::configure(IPAContext &context,
 	maxAnalogueGain_ = std::min(configuration.agc.maxAnalogueGain, kMaxAnalogueGain);
 
 	/* Configure the default exposure and gain. */
-	frameContext.agc.gain = std::max(minAnalogueGain_, kMinAnalogueGain);
-	frameContext.agc.exposure = 10ms / configuration.sensor.lineDuration;
+	activeState.agc.gain = std::max(minAnalogueGain_, kMinAnalogueGain);
+	activeState.agc.exposure = 10ms / configuration.sensor.lineDuration;
 
 	frameCount_ = 0;
 	return 0;
@@ -249,9 +249,10 @@ void Agc::computeExposure(IPAContext &context, double yGain,
 			  << shutterTime << " and "
 			  << stepGain;
 
+	IPAActiveState &activeState = context.activeState;
 	/* Update the estimated exposure and gain. */
-	frameContext.agc.exposure = shutterTime / configuration.sensor.lineDuration;
-	frameContext.agc.gain = stepGain;
+	activeState.agc.exposure = shutterTime / configuration.sensor.lineDuration;
+	activeState.agc.gain = stepGain;
 }
 
 /**
@@ -279,7 +280,7 @@ void Agc::computeExposure(IPAContext &context, double yGain,
  * More detailed information can be found in:
  * https://en.wikipedia.org/wiki/Relative_luminance
  */
-double Agc::estimateLuminance(IPAFrameContext &frameContext,
+double Agc::estimateLuminance(IPAActiveState &activeState,
			      const ipu3_uapi_grid_config &grid,
			      const ipu3_uapi_stats_3a *stats,
			      double gain)
@@ -307,9 +308,9 @@ double Agc::estimateLuminance(IPAFrameContext &frameContext,
 	 * Apply the AWB gains to approximate colours correctly, use the Rec.
 	 * 601 formula to calculate the relative luminance, and normalize it.
 	 */
-	double ySum = redSum * frameContext.awb.gains.red * 0.299
-		      + greenSum * frameContext.awb.gains.green * 0.587
-		      + blueSum * frameContext.awb.gains.blue * 0.114;
+	double ySum = redSum * activeState.awb.gains.red * 0.299
+		      + greenSum * activeState.awb.gains.green * 0.587
+		      + blueSum * activeState.awb.gains.blue * 0.114;
 
 	return ySum / (grid.height * grid.width) / 255;
 }
@@ -344,7 +345,7 @@ void Agc::process(IPAContext &context, const ipu3_uapi_stats_3a *stats)
 	double yTarget = kRelativeLuminanceTarget;
 
 	for (unsigned int i = 0; i < 8; i++) {
-		double yValue = estimateLuminance(context.frameContext,
+		double yValue = estimateLuminance(context.activeState,
						  context.configuration.grid.bdsGrid,
						  stats, yGain);
 		double extraGain = std::min(10.0, yTarget / (yValue + .001));
@@ -36,7 +36,7 @@ private:
 	utils::Duration filterExposure(utils::Duration currentExposure);
 	void computeExposure(IPAContext &context, double yGain,
			     double iqMeanGain);
-	double estimateLuminance(IPAFrameContext &frameContext,
+	double estimateLuminance(IPAActiveState &activeState,
				 const ipu3_uapi_grid_config &grid,
				 const ipu3_uapi_stats_3a *stats,
				 double gain);
@@ -396,10 +396,10 @@ void Awb::process(IPAContext &context, const ipu3_uapi_stats_3a *stats)
 	 * The results are cached, so if no results were calculated, we set the
 	 * cached values from asyncResults_ here.
 	 */
-	context.frameContext.awb.gains.blue = asyncResults_.blueGain;
-	context.frameContext.awb.gains.green = asyncResults_.greenGain;
-	context.frameContext.awb.gains.red = asyncResults_.redGain;
-	context.frameContext.awb.temperatureK = asyncResults_.temperatureK;
+	context.activeState.awb.gains.blue = asyncResults_.blueGain;
+	context.activeState.awb.gains.green = asyncResults_.greenGain;
+	context.activeState.awb.gains.red = asyncResults_.redGain;
+	context.activeState.awb.temperatureK = asyncResults_.temperatureK;
 }
 
 constexpr uint16_t Awb::threshold(float value)
@@ -450,10 +450,10 @@ void Awb::prepare(IPAContext &context, ipu3_uapi_params *params)
 	params->acc_param.bnr.opt_center_sqr.y_sqr_reset = params->acc_param.bnr.opt_center.y_reset
						* params->acc_param.bnr.opt_center.y_reset;
 	/* Convert to u3.13 fixed point values */
-	params->acc_param.bnr.wb_gains.gr = 8192 * context.frameContext.awb.gains.green;
-	params->acc_param.bnr.wb_gains.r = 8192 * context.frameContext.awb.gains.red;
-	params->acc_param.bnr.wb_gains.b = 8192 * context.frameContext.awb.gains.blue;
-	params->acc_param.bnr.wb_gains.gb = 8192 * context.frameContext.awb.gains.green;
+	params->acc_param.bnr.wb_gains.gr = 8192 * context.activeState.awb.gains.green;
+	params->acc_param.bnr.wb_gains.r = 8192 * context.activeState.awb.gains.red;
+	params->acc_param.bnr.wb_gains.b = 8192 * context.activeState.awb.gains.blue;
+	params->acc_param.bnr.wb_gains.gb = 8192 * context.activeState.awb.gains.green;
 
 	LOG(IPU3Awb, Debug) << "Color temperature estimated: " << asyncResults_.temperatureK;
 
@@ -42,7 +42,7 @@ int ToneMapping::configure(IPAContext &context,
			   [[maybe_unused]] const IPAConfigInfo &configInfo)
 {
 	/* Initialise tone mapping gamma value. */
-	context.frameContext.toneMapping.gamma = 0.0;
+	context.activeState.toneMapping.gamma = 0.0;
 
 	return 0;
 }
@@ -60,7 +60,7 @@ void ToneMapping::prepare([[maybe_unused]] IPAContext &context,
 {
 	/* Copy the calculated LUT into the parameters buffer. */
 	memcpy(params->acc_param.gamma.gc_lut.lut,
-	       context.frameContext.toneMapping.gammaCorrection.lut,
+	       context.activeState.toneMapping.gammaCorrection.lut,
	       IPU3_UAPI_GAMMA_CORR_LUT_ENTRIES *
	       sizeof(params->acc_param.gamma.gc_lut.lut[0]));
 
@@ -87,11 +87,11 @@ void ToneMapping::process(IPAContext &context,
 	 */
 	gamma_ = 1.1;
 
-	if (context.frameContext.toneMapping.gamma == gamma_)
+	if (context.activeState.toneMapping.gamma == gamma_)
 		return;
 
 	struct ipu3_uapi_gamma_corr_lut &lut =
-		context.frameContext.toneMapping.gammaCorrection;
+		context.activeState.toneMapping.gammaCorrection;
 
 	for (uint32_t i = 0; i < std::size(lut.lut); i++) {
 		double j = static_cast<double>(i) / (std::size(lut.lut) - 1);
@@ -101,7 +101,7 @@ void ToneMapping::process(IPAContext &context,
 		lut.lut[i] = gamma * 8191;
 	}
 
-	context.frameContext.toneMapping.gamma = gamma_;
+	context.activeState.toneMapping.gamma = gamma_;
 }
 
 } /* namespace ipa::ipu3::algorithms */
@@ -24,19 +24,31 @@ namespace libcamera::ipa::ipu3 {
  * may also be updated in the start() operation.
  */
 
+/**
+ * \struct IPAActiveState
+ * \brief The active state of the IPA algorithms
+ *
+ * The IPA is fed with the statistics generated from the latest frame captured
+ * by the hardware. The statistics are then processed by the IPA algorithms to
+ * compute ISP parameters required for the next frame capture. The current state
+ * of the algorithms is reflected through the IPAActiveState to store the values
+ * most recently computed by the IPA algorithms.
+ */
+
 /**
  * \struct IPAFrameContext
- * \brief Per-frame context for algorithms
+ * \brief Context for a frame
  *
  * The frame context stores data specific to a single frame processed by the
  * IPA. Each frame processed by the IPA has a context associated with it,
  * accessible through the IPAContext structure.
 *
- * \todo Detail how to access contexts for a particular frame
- *
- * Each of the fields in the frame context belongs to either a specific
- * algorithm, or to the top-level IPA module. A field may be read by any
- * algorithm, but should only be written by its owner.
+ * Fields in the frame context should reflect values and controls
+ * associated with the specific frame as requested by the application, and
+ * as configured by the hardware. Fields can be read by algorithms to
+ * determine if they should update any specific action for this frame, and
+ * finally to update the metadata control lists when the frame is fully
+ * completed.
 */
 
 /**
@@ -49,10 +61,10 @@ namespace libcamera::ipa::ipu3 {
  * \var IPAContext::frameContext
  * \brief The frame context for the frame being processed
  *
- * \todo While the frame context is supposed to be per-frame, this
- * single frame context stores data related to both the current frame
- * and the previous frames, with fields being updated as the algorithms
- * are run. This needs to be turned into real per-frame data storage.
+ * \var IPAContext::activeState
+ * \brief The current state of IPA algorithms
+ *
+ * \todo The frame context needs to be turned into real per-frame data storage.
 */
 
 /**
@@ -78,17 +90,17 @@ namespace libcamera::ipa::ipu3 {
  */
 
 /**
- * \var IPAFrameContext::af
+ * \var IPAActiveState::af
  * \brief Context for the Automatic Focus algorithm
  *
- * \struct IPAFrameContext::af
- * \var IPAFrameContext::af.focus
+ * \struct IPAActiveState::af
+ * \var IPAActiveState::af.focus
  * \brief Current position of the lens
  *
- * \var IPAFrameContext::af.maxVariance
+ * \var IPAActiveState::af.maxVariance
  * \brief The maximum variance of the current image.
 *
- * \var IPAFrameContext::af.stable
+ * \var IPAActiveState::af.stable
 * \brief It is set to true, if the best focus is found.
 */
 
@@ -121,44 +133,58 @@ namespace libcamera::ipa::ipu3 {
  */
 
 /**
- * \var IPAFrameContext::agc
+ * \var IPAActiveState::agc
  * \brief Context for the Automatic Gain Control algorithm
 *
 * The exposure and gain determined are expected to be applied to the sensor
 * at the earliest opportunity.
 *
- * \var IPAFrameContext::agc.exposure
+ * \var IPAActiveState::agc.exposure
 * \brief Exposure time expressed as a number of lines
 *
- * \var IPAFrameContext::agc.gain
+ * \var IPAActiveState::agc.gain
 * \brief Analogue gain multiplier
 *
 * The gain should be adapted to the sensor specific gain code before applying.
 */
 
 /**
- * \var IPAFrameContext::awb
+ * \var IPAActiveState::awb
 * \brief Context for the Automatic White Balance algorithm
 *
- * \struct IPAFrameContext::awb.gains
+ * \struct IPAActiveState::awb.gains
 * \brief White balance gains
 *
- * \var IPAFrameContext::awb.gains.red
+ * \var IPAActiveState::awb.gains.red
 * \brief White balance gain for R channel
 *
- * \var IPAFrameContext::awb.gains.green
+ * \var IPAActiveState::awb.gains.green
 * \brief White balance gain for G channel
 *
- * \var IPAFrameContext::awb.gains.blue
+ * \var IPAActiveState::awb.gains.blue
 * \brief White balance gain for B channel
 *
- * \var IPAFrameContext::awb.temperatureK
+ * \var IPAActiveState::awb.temperatureK
 * \brief Estimated color temperature
 */
 
+/**
+ * \var IPAActiveState::toneMapping
+ * \brief Context for ToneMapping and Gamma control
+ *
+ * \var IPAActiveState::toneMapping.gamma
+ * \brief Gamma value for the LUT
+ *
+ * \var IPAActiveState::toneMapping.gammaCorrection
+ * \brief Per-pixel tone mapping implemented as a LUT
+ *
+ * The LUT structure is defined by the IPU3 kernel interface. See
+ * <linux/intel-ipu3.h> struct ipu3_uapi_gamma_corr_lut for further details.
+ */
+
 /**
 * \var IPAFrameContext::sensor
- * \brief Effective sensor values
+ * \brief Effective sensor values that were applied for the frame
 *
 * \var IPAFrameContext::sensor.exposure
 * \brief Exposure time expressed as a number of lines
@@ -167,18 +193,4 @@ namespace libcamera::ipa::ipu3 {
 * \brief Analogue gain multiplier
 */
 
-/**
- * \var IPAFrameContext::toneMapping
- * \brief Context for ToneMapping and Gamma control
- *
- * \var IPAFrameContext::toneMapping.gamma
- * \brief Gamma value for the LUT
- *
- * \var IPAFrameContext::toneMapping.gammaCorrection
- * \brief Per-pixel tone mapping implemented as a LUT
- *
- * The LUT structure is defined by the IPU3 kernel interface. See
- * <linux/intel-ipu3.h> struct ipu3_uapi_gamma_corr_lut for further details.
- */
-
 } /* namespace libcamera::ipa::ipu3 */
@@ -42,7 +42,7 @@ struct IPASessionConfiguration {
 	} sensor;
 };
 
-struct IPAFrameContext {
+struct IPAActiveState {
 	struct {
 		uint32_t focus;
 		double maxVariance;
@@ -64,19 +64,23 @@ struct IPAFrameContext {
 		double temperatureK;
 	} awb;
 
-	struct {
-		uint32_t exposure;
-		double gain;
-	} sensor;
-
 	struct {
 		double gamma;
 		struct ipu3_uapi_gamma_corr_lut gammaCorrection;
 	} toneMapping;
 };
 
+struct IPAFrameContext {
+	struct {
+		uint32_t exposure;
+		double gain;
+	} sensor;
+};
+
 struct IPAContext {
 	IPASessionConfiguration configuration;
+	IPAActiveState activeState;
+
 	IPAFrameContext frameContext;
 };
 
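As a rough usage illustration of the split defined above (a standalone sketch, not code from this change; the helper functions are invented for clarity):

	#include <cstdint>

	struct IPAActiveState { struct { uint32_t exposure; double gain; } agc; };
	struct IPAFrameContext { struct { uint32_t exposure; double gain; } sensor; };
	struct IPAContext { IPAActiveState activeState; IPAFrameContext frameContext; };

	/* An algorithm publishes its latest results into the active state... */
	static void runAgc(IPAContext &context, uint32_t exposureLines, double gain)
	{
		context.activeState.agc.exposure = exposureLines;
		context.activeState.agc.gain = gain;
	}

	/* ...while the values applied on the sensor are recorded in the frame
	 * context (here simply copied from the active state for illustration)
	 * and later reported back as metadata for that frame. */
	static void recordSensorControls(IPAContext &context)
	{
		context.frameContext.sensor.exposure = context.activeState.agc.exposure;
		context.frameContext.sensor.gain = context.activeState.agc.gain;
	}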
@@ -454,7 +454,8 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo,
 
 	calculateBdsGrid(configInfo.bdsOutputSize);
 
-	/* Clean frameContext at each reconfiguration. */
+	/* Clean IPAActiveState at each reconfiguration. */
+	context_.activeState = {};
 	context_.frameContext = {};
 
 	if (!validateSensorControls()) {
@@ -585,7 +586,7 @@ void IPAIPU3::processStatsBuffer(const uint32_t frame,
 
 	ctrls.set(controls::AnalogueGain, context_.frameContext.sensor.gain);
 
-	ctrls.set(controls::ColourTemperature, context_.frameContext.awb.temperatureK);
+	ctrls.set(controls::ColourTemperature, context_.activeState.awb.temperatureK);
 
 	ctrls.set(controls::ExposureTime, context_.frameContext.sensor.exposure * lineDuration);
 
@@ -623,8 +624,8 @@ void IPAIPU3::queueRequest([[maybe_unused]] const uint32_t frame,
 */
 void IPAIPU3::setControls(unsigned int frame)
 {
-	int32_t exposure = context_.frameContext.agc.exposure;
-	int32_t gain = camHelper_->gainCode(context_.frameContext.agc.gain);
+	int32_t exposure = context_.activeState.agc.exposure;
+	int32_t gain = camHelper_->gainCode(context_.activeState.agc.gain);
 
 	ControlList ctrls(sensorCtrls_);
 	ctrls.set(V4L2_CID_EXPOSURE, exposure);
@@ -632,7 +633,7 @@ void IPAIPU3::setControls(unsigned int frame)
 
 	ControlList lensCtrls(lensCtrls_);
 	lensCtrls.set(V4L2_CID_FOCUS_ABSOLUTE,
-		      static_cast<int32_t>(context_.frameContext.af.focus));
+		      static_cast<int32_t>(context_.activeState.af.focus));
 
 	setSensorControls.emit(frame, ctrls, lensCtrls);
 }