ipa: raspberrypi: Code refactoring to match style guidelines

Refactor all the source files in src/ipa/raspberrypi/ to match the recommended
formatting guidelines for the libcamera project. The vast majority of changes
in this commit comprise of switching from snake_case to CamelCase, and starting
class member functions with a lower case character.

Signed-off-by: Naushir Patuck <naush@raspberrypi.com>
Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
This commit is contained in:
Naushir Patuck 2022-07-27 09:55:17 +01:00 committed by Laurent Pinchart
parent b4a3eb6b98
commit 177df04d2b
63 changed files with 2093 additions and 2161 deletions

View file

@@ -24,16 +24,16 @@ namespace libcamera {
LOG_DECLARE_CATEGORY(IPARPI)
}
static std::map<std::string, CamHelperCreateFunc> cam_helpers;
static std::map<std::string, CamHelperCreateFunc> camHelpers;
CamHelper *CamHelper::Create(std::string const &cam_name)
CamHelper *CamHelper::create(std::string const &camName)
{
/*
* CamHelpers get registered by static RegisterCamHelper
* initialisers.
*/
for (auto &p : cam_helpers) {
if (cam_name.find(p.first) != std::string::npos)
for (auto &p : camHelpers) {
if (camName.find(p.first) != std::string::npos)
return p.second();
}
@@ -50,35 +50,35 @@ CamHelper::~CamHelper()
{
}
void CamHelper::Prepare(Span<const uint8_t> buffer,
void CamHelper::prepare(Span<const uint8_t> buffer,
Metadata &metadata)
{
parseEmbeddedData(buffer, metadata);
}
void CamHelper::Process([[maybe_unused]] StatisticsPtr &stats,
void CamHelper::process([[maybe_unused]] StatisticsPtr &stats,
[[maybe_unused]] Metadata &metadata)
{
}
uint32_t CamHelper::ExposureLines(const Duration exposure) const
uint32_t CamHelper::exposureLines(const Duration exposure) const
{
assert(initialized_);
return exposure / mode_.line_length;
return exposure / mode_.lineLength;
}
Duration CamHelper::Exposure(uint32_t exposure_lines) const
Duration CamHelper::exposure(uint32_t exposureLines) const
{
assert(initialized_);
return exposure_lines * mode_.line_length;
return exposureLines * mode_.lineLength;
}
uint32_t CamHelper::GetVBlanking(Duration &exposure,
uint32_t CamHelper::getVBlanking(Duration &exposure,
Duration minFrameDuration,
Duration maxFrameDuration) const
{
uint32_t frameLengthMin, frameLengthMax, vblank;
uint32_t exposureLines = ExposureLines(exposure);
uint32_t exposureLines = CamHelper::exposureLines(exposure);
assert(initialized_);
@@ -86,15 +86,15 @@ uint32_t CamHelper::GetVBlanking(Duration &exposure,
* minFrameDuration and maxFrameDuration are clamped by the caller
* based on the limits for the active sensor mode.
*/
frameLengthMin = minFrameDuration / mode_.line_length;
frameLengthMax = maxFrameDuration / mode_.line_length;
frameLengthMin = minFrameDuration / mode_.lineLength;
frameLengthMax = maxFrameDuration / mode_.lineLength;
/*
* Limit the exposure to the maximum frame duration requested, and
* re-calculate if it has been clipped.
*/
exposureLines = std::min(frameLengthMax - frameIntegrationDiff_, exposureLines);
exposure = Exposure(exposureLines);
exposure = CamHelper::exposure(exposureLines);
/* Limit the vblank to the range allowed by the frame length limits. */
vblank = std::clamp(exposureLines + frameIntegrationDiff_,
@@ -102,34 +102,34 @@ uint32_t CamHelper::GetVBlanking(Duration &exposure,
return vblank;
}
void CamHelper::SetCameraMode(const CameraMode &mode)
void CamHelper::setCameraMode(const CameraMode &mode)
{
mode_ = mode;
if (parser_) {
parser_->SetBitsPerPixel(mode.bitdepth);
parser_->SetLineLengthBytes(0); /* We use SetBufferSize. */
parser_->setBitsPerPixel(mode.bitdepth);
parser_->setLineLengthBytes(0); /* We use SetBufferSize. */
}
initialized_ = true;
}
void CamHelper::GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const
void CamHelper::getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const
{
/*
* These values are correct for many sensors. Other sensors will
* need to over-ride this function.
*/
exposure_delay = 2;
gain_delay = 1;
vblank_delay = 2;
exposureDelay = 2;
gainDelay = 1;
vblankDelay = 2;
}
bool CamHelper::SensorEmbeddedDataPresent() const
bool CamHelper::sensorEmbeddedDataPresent() const
{
return false;
}
double CamHelper::GetModeSensitivity([[maybe_unused]] const CameraMode &mode) const
double CamHelper::getModeSensitivity([[maybe_unused]] const CameraMode &mode) const
{
/*
* Most sensors have the same sensitivity in every mode, but this
@@ -140,7 +140,7 @@ double CamHelper::GetModeSensitivity([[maybe_unused]] const CameraMode &mode) co
return 1.0;
}
unsigned int CamHelper::HideFramesStartup() const
unsigned int CamHelper::hideFramesStartup() const
{
/*
* The number of frames when a camera first starts that shouldn't be
@@ -149,19 +149,19 @@ unsigned int CamHelper::HideFramesStartup() const
return 0;
}
unsigned int CamHelper::HideFramesModeSwitch() const
unsigned int CamHelper::hideFramesModeSwitch() const
{
/* After a mode switch, many sensors return valid frames immediately. */
return 0;
}
unsigned int CamHelper::MistrustFramesStartup() const
unsigned int CamHelper::mistrustFramesStartup() const
{
/* Many sensors return a single bad frame on start-up. */
return 1;
}
unsigned int CamHelper::MistrustFramesModeSwitch() const
unsigned int CamHelper::mistrustFramesModeSwitch() const
{
/* Many sensors return valid metadata immediately. */
return 0;
@@ -176,13 +176,13 @@ void CamHelper::parseEmbeddedData(Span<const uint8_t> buffer,
if (buffer.empty())
return;
if (parser_->Parse(buffer, registers) != MdParser::Status::OK) {
if (parser_->parse(buffer, registers) != MdParser::Status::OK) {
LOG(IPARPI, Error) << "Embedded data buffer parsing failed";
return;
}
PopulateMetadata(registers, parsedMetadata);
metadata.Merge(parsedMetadata);
populateMetadata(registers, parsedMetadata);
metadata.merge(parsedMetadata);
/*
* Overwrite the exposure/gain, frame length and sensor temperature values
@@ -190,30 +190,30 @@ void CamHelper::parseEmbeddedData(Span<const uint8_t> buffer,
* Fetch it first in case any other fields were set meaningfully.
*/
DeviceStatus deviceStatus, parsedDeviceStatus;
if (metadata.Get("device.status", deviceStatus) ||
parsedMetadata.Get("device.status", parsedDeviceStatus)) {
if (metadata.get("device.status", deviceStatus) ||
parsedMetadata.get("device.status", parsedDeviceStatus)) {
LOG(IPARPI, Error) << "DeviceStatus not found";
return;
}
deviceStatus.shutter_speed = parsedDeviceStatus.shutter_speed;
deviceStatus.analogue_gain = parsedDeviceStatus.analogue_gain;
deviceStatus.frame_length = parsedDeviceStatus.frame_length;
if (parsedDeviceStatus.sensor_temperature)
deviceStatus.sensor_temperature = parsedDeviceStatus.sensor_temperature;
deviceStatus.shutterSpeed = parsedDeviceStatus.shutterSpeed;
deviceStatus.analogueGain = parsedDeviceStatus.analogueGain;
deviceStatus.frameLength = parsedDeviceStatus.frameLength;
if (parsedDeviceStatus.sensorTemperature)
deviceStatus.sensorTemperature = parsedDeviceStatus.sensorTemperature;
LOG(IPARPI, Debug) << "Metadata updated - " << deviceStatus;
metadata.Set("device.status", deviceStatus);
metadata.set("device.status", deviceStatus);
}
void CamHelper::PopulateMetadata([[maybe_unused]] const MdParser::RegisterMap &registers,
void CamHelper::populateMetadata([[maybe_unused]] const MdParser::RegisterMap &registers,
[[maybe_unused]] Metadata &metadata) const
{
}
RegisterCamHelper::RegisterCamHelper(char const *cam_name,
CamHelperCreateFunc create_func)
RegisterCamHelper::RegisterCamHelper(char const *camName,
CamHelperCreateFunc createFunc)
{
cam_helpers[std::string(cam_name)] = create_func;
camHelpers[std::string(camName)] = createFunc;
}

View file

@@ -69,33 +69,33 @@ namespace RPiController {
class CamHelper
{
public:
static CamHelper *Create(std::string const &cam_name);
static CamHelper *create(std::string const &camName);
CamHelper(std::unique_ptr<MdParser> parser, unsigned int frameIntegrationDiff);
virtual ~CamHelper();
void SetCameraMode(const CameraMode &mode);
virtual void Prepare(libcamera::Span<const uint8_t> buffer,
void setCameraMode(const CameraMode &mode);
virtual void prepare(libcamera::Span<const uint8_t> buffer,
Metadata &metadata);
virtual void Process(StatisticsPtr &stats, Metadata &metadata);
virtual uint32_t ExposureLines(libcamera::utils::Duration exposure) const;
virtual libcamera::utils::Duration Exposure(uint32_t exposure_lines) const;
virtual uint32_t GetVBlanking(libcamera::utils::Duration &exposure,
virtual void process(StatisticsPtr &stats, Metadata &metadata);
virtual uint32_t exposureLines(libcamera::utils::Duration exposure) const;
virtual libcamera::utils::Duration exposure(uint32_t exposureLines) const;
virtual uint32_t getVBlanking(libcamera::utils::Duration &exposure,
libcamera::utils::Duration minFrameDuration,
libcamera::utils::Duration maxFrameDuration) const;
virtual uint32_t GainCode(double gain) const = 0;
virtual double Gain(uint32_t gain_code) const = 0;
virtual void GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const;
virtual bool SensorEmbeddedDataPresent() const;
virtual double GetModeSensitivity(const CameraMode &mode) const;
virtual unsigned int HideFramesStartup() const;
virtual unsigned int HideFramesModeSwitch() const;
virtual unsigned int MistrustFramesStartup() const;
virtual unsigned int MistrustFramesModeSwitch() const;
virtual uint32_t gainCode(double gain) const = 0;
virtual double gain(uint32_t gainCode) const = 0;
virtual void getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const;
virtual bool sensorEmbeddedDataPresent() const;
virtual double getModeSensitivity(const CameraMode &mode) const;
virtual unsigned int hideFramesStartup() const;
virtual unsigned int hideFramesModeSwitch() const;
virtual unsigned int mistrustFramesStartup() const;
virtual unsigned int mistrustFramesModeSwitch() const;
protected:
void parseEmbeddedData(libcamera::Span<const uint8_t> buffer,
Metadata &metadata);
virtual void PopulateMetadata(const MdParser::RegisterMap &registers,
virtual void populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const;
std::unique_ptr<MdParser> parser_;
@@ -116,8 +116,8 @@ private:
typedef CamHelper *(*CamHelperCreateFunc)();
struct RegisterCamHelper
{
RegisterCamHelper(char const *cam_name,
CamHelperCreateFunc create_func);
RegisterCamHelper(char const *camName,
CamHelperCreateFunc createFunc);
};
} // namespace RPi

View file

@@ -39,10 +39,10 @@ class CamHelperImx219 : public CamHelper
{
public:
CamHelperImx219();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
unsigned int MistrustFramesModeSwitch() const override;
bool SensorEmbeddedDataPresent() const override;
uint32_t gainCode(double gain) const override;
double gain(uint32_t gainCode) const override;
unsigned int mistrustFramesModeSwitch() const override;
bool sensorEmbeddedDataPresent() const override;
private:
/*
@@ -51,7 +51,7 @@ private:
*/
static constexpr int frameIntegrationDiff = 4;
void PopulateMetadata(const MdParser::RegisterMap &registers,
void populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const override;
};
@@ -64,17 +64,17 @@ CamHelperImx219::CamHelperImx219()
{
}
uint32_t CamHelperImx219::GainCode(double gain) const
uint32_t CamHelperImx219::gainCode(double gain) const
{
return (uint32_t)(256 - 256 / gain);
}
double CamHelperImx219::Gain(uint32_t gain_code) const
double CamHelperImx219::gain(uint32_t gainCode) const
{
return 256.0 / (256 - gain_code);
return 256.0 / (256 - gainCode);
}
unsigned int CamHelperImx219::MistrustFramesModeSwitch() const
unsigned int CamHelperImx219::mistrustFramesModeSwitch() const
{
/*
* For reasons unknown, we do occasionally get a bogus metadata frame
@@ -84,26 +84,26 @@ unsigned int CamHelperImx219::MistrustFramesModeSwitch() const
return 1;
}
bool CamHelperImx219::SensorEmbeddedDataPresent() const
bool CamHelperImx219::sensorEmbeddedDataPresent() const
{
return ENABLE_EMBEDDED_DATA;
}
void CamHelperImx219::PopulateMetadata(const MdParser::RegisterMap &registers,
void CamHelperImx219::populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const
{
DeviceStatus deviceStatus;
deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
deviceStatus.analogue_gain = Gain(registers.at(gainReg));
deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
deviceStatus.shutterSpeed = exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
deviceStatus.analogueGain = gain(registers.at(gainReg));
deviceStatus.frameLength = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
metadata.Set("device.status", deviceStatus);
metadata.set("device.status", deviceStatus);
}
static CamHelper *Create()
static CamHelper *create()
{
return new CamHelperImx219();
}
static RegisterCamHelper reg("imx219", &Create);
static RegisterCamHelper reg("imx219", &create);

View file

@@ -15,11 +15,11 @@ class CamHelperImx290 : public CamHelper
{
public:
CamHelperImx290();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
void GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const override;
unsigned int HideFramesModeSwitch() const override;
uint32_t gainCode(double gain) const override;
double gain(uint32_t gainCode) const override;
void getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const override;
unsigned int hideFramesModeSwitch() const override;
private:
/*
@@ -34,34 +34,34 @@ CamHelperImx290::CamHelperImx290()
{
}
uint32_t CamHelperImx290::GainCode(double gain) const
uint32_t CamHelperImx290::gainCode(double gain) const
{
int code = 66.6667 * log10(gain);
return std::max(0, std::min(code, 0xf0));
}
double CamHelperImx290::Gain(uint32_t gain_code) const
double CamHelperImx290::gain(uint32_t gainCode) const
{
return pow(10, 0.015 * gain_code);
return pow(10, 0.015 * gainCode);
}
void CamHelperImx290::GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const
void CamHelperImx290::getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const
{
exposure_delay = 2;
gain_delay = 2;
vblank_delay = 2;
exposureDelay = 2;
gainDelay = 2;
vblankDelay = 2;
}
unsigned int CamHelperImx290::HideFramesModeSwitch() const
unsigned int CamHelperImx290::hideFramesModeSwitch() const
{
/* After a mode switch, we seem to get 1 bad frame. */
return 1;
}
static CamHelper *Create()
static CamHelper *create()
{
return new CamHelperImx290();
}
static RegisterCamHelper reg("imx290", &Create);
static RegisterCamHelper reg("imx290", &create);

View file

@@ -19,10 +19,10 @@ class CamHelperImx296 : public CamHelper
{
public:
CamHelperImx296();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
uint32_t ExposureLines(Duration exposure) const override;
Duration Exposure(uint32_t exposure_lines) const override;
uint32_t gainCode(double gain) const override;
double gain(uint32_t gainCode) const override;
uint32_t exposureLines(Duration exposure) const override;
Duration exposure(uint32_t exposureLines) const override;
private:
static constexpr uint32_t maxGainCode = 239;
@@ -40,30 +40,30 @@ CamHelperImx296::CamHelperImx296()
{
}
uint32_t CamHelperImx296::GainCode(double gain) const
uint32_t CamHelperImx296::gainCode(double gain) const
{
uint32_t code = 20 * std::log10(gain) * 10;
return std::min(code, maxGainCode);
}
double CamHelperImx296::Gain(uint32_t gain_code) const
double CamHelperImx296::gain(uint32_t gainCode) const
{
return std::pow(10.0, gain_code / 200.0);
return std::pow(10.0, gainCode / 200.0);
}
uint32_t CamHelperImx296::ExposureLines(Duration exposure) const
uint32_t CamHelperImx296::exposureLines(Duration exposure) const
{
return (exposure - 14.26us) / timePerLine;
}
Duration CamHelperImx296::Exposure(uint32_t exposure_lines) const
Duration CamHelperImx296::exposure(uint32_t exposureLines) const
{
return exposure_lines * timePerLine + 14.26us;
return exposureLines * timePerLine + 14.26us;
}
static CamHelper *Create()
static CamHelper *create()
{
return new CamHelperImx296();
}
static RegisterCamHelper reg("imx296", &Create);
static RegisterCamHelper reg("imx296", &create);

View file

@@ -43,14 +43,14 @@ class CamHelperImx477 : public CamHelper
{
public:
CamHelperImx477();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
void Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
uint32_t GetVBlanking(Duration &exposure, Duration minFrameDuration,
uint32_t gainCode(double gain) const override;
double gain(uint32_t gainCode) const override;
void prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
uint32_t getVBlanking(Duration &exposure, Duration minFrameDuration,
Duration maxFrameDuration) const override;
void GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const override;
bool SensorEmbeddedDataPresent() const override;
void getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const override;
bool sensorEmbeddedDataPresent() const override;
private:
/*
@@ -63,7 +63,7 @@ private:
/* Largest long exposure scale factor given as a left shift on the frame length. */
static constexpr int longExposureShiftMax = 7;
void PopulateMetadata(const MdParser::RegisterMap &registers,
void populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const override;
};
@@ -72,22 +72,22 @@ CamHelperImx477::CamHelperImx477()
{
}
uint32_t CamHelperImx477::GainCode(double gain) const
uint32_t CamHelperImx477::gainCode(double gain) const
{
return static_cast<uint32_t>(1024 - 1024 / gain);
}
double CamHelperImx477::Gain(uint32_t gain_code) const
double CamHelperImx477::gain(uint32_t gainCode) const
{
return 1024.0 / (1024 - gain_code);
return 1024.0 / (1024 - gainCode);
}
void CamHelperImx477::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
void CamHelperImx477::prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
{
MdParser::RegisterMap registers;
DeviceStatus deviceStatus;
if (metadata.Get("device.status", deviceStatus)) {
if (metadata.get("device.status", deviceStatus)) {
LOG(IPARPI, Error) << "DeviceStatus not found from DelayedControls";
return;
}
@@ -105,27 +105,27 @@ void CamHelperImx477::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &m
* Otherwise, all values are updated with what is reported in the
* embedded data.
*/
if (deviceStatus.frame_length > frameLengthMax) {
if (deviceStatus.frameLength > frameLengthMax) {
DeviceStatus parsedDeviceStatus;
metadata.Get("device.status", parsedDeviceStatus);
parsedDeviceStatus.shutter_speed = deviceStatus.shutter_speed;
parsedDeviceStatus.frame_length = deviceStatus.frame_length;
metadata.Set("device.status", parsedDeviceStatus);
metadata.get("device.status", parsedDeviceStatus);
parsedDeviceStatus.shutterSpeed = deviceStatus.shutterSpeed;
parsedDeviceStatus.frameLength = deviceStatus.frameLength;
metadata.set("device.status", parsedDeviceStatus);
LOG(IPARPI, Debug) << "Metadata updated for long exposure: "
<< parsedDeviceStatus;
}
}
uint32_t CamHelperImx477::GetVBlanking(Duration &exposure,
uint32_t CamHelperImx477::getVBlanking(Duration &exposure,
Duration minFrameDuration,
Duration maxFrameDuration) const
{
uint32_t frameLength, exposureLines;
unsigned int shift = 0;
frameLength = mode_.height + CamHelper::GetVBlanking(exposure, minFrameDuration,
frameLength = mode_.height + CamHelper::getVBlanking(exposure, minFrameDuration,
maxFrameDuration);
/*
* Check if the frame length calculated needs to be setup for long
@@ -144,43 +144,43 @@ uint32_t CamHelperImx477::GetVBlanking(Duration &exposure,
if (shift) {
/* Account for any rounding in the scaled frame length value. */
frameLength <<= shift;
exposureLines = ExposureLines(exposure);
exposureLines = CamHelperImx477::exposureLines(exposure);
exposureLines = std::min(exposureLines, frameLength - frameIntegrationDiff);
exposure = Exposure(exposureLines);
exposure = CamHelperImx477::exposure(exposureLines);
}
return frameLength - mode_.height;
}
void CamHelperImx477::GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const
void CamHelperImx477::getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const
{
exposure_delay = 2;
gain_delay = 2;
vblank_delay = 3;
exposureDelay = 2;
gainDelay = 2;
vblankDelay = 3;
}
bool CamHelperImx477::SensorEmbeddedDataPresent() const
bool CamHelperImx477::sensorEmbeddedDataPresent() const
{
return true;
}
void CamHelperImx477::PopulateMetadata(const MdParser::RegisterMap &registers,
void CamHelperImx477::populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const
{
DeviceStatus deviceStatus;
deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
deviceStatus.analogue_gain = Gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
deviceStatus.sensor_temperature = std::clamp<int8_t>(registers.at(temperatureReg), -20, 80);
deviceStatus.shutterSpeed = exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
deviceStatus.analogueGain = gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
deviceStatus.frameLength = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
deviceStatus.sensorTemperature = std::clamp<int8_t>(registers.at(temperatureReg), -20, 80);
metadata.Set("device.status", deviceStatus);
metadata.set("device.status", deviceStatus);
}
static CamHelper *Create()
static CamHelper *create()
{
return new CamHelperImx477();
}
static RegisterCamHelper reg("imx477", &Create);
static RegisterCamHelper reg("imx477", &create);

View file

@@ -43,14 +43,14 @@ class CamHelperImx519 : public CamHelper
{
public:
CamHelperImx519();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
void Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
uint32_t GetVBlanking(Duration &exposure, Duration minFrameDuration,
uint32_t gainCode(double gain) const override;
double gain(uint32_t gainCode) const override;
void prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
uint32_t getVBlanking(Duration &exposure, Duration minFrameDuration,
Duration maxFrameDuration) const override;
void GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const override;
bool SensorEmbeddedDataPresent() const override;
void getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const override;
bool sensorEmbeddedDataPresent() const override;
private:
/*
@@ -63,7 +63,7 @@ private:
/* Largest long exposure scale factor given as a left shift on the frame length. */
static constexpr int longExposureShiftMax = 7;
void PopulateMetadata(const MdParser::RegisterMap &registers,
void populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const override;
};
@@ -72,22 +72,22 @@ CamHelperImx519::CamHelperImx519()
{
}
uint32_t CamHelperImx519::GainCode(double gain) const
uint32_t CamHelperImx519::gainCode(double gain) const
{
return static_cast<uint32_t>(1024 - 1024 / gain);
}
double CamHelperImx519::Gain(uint32_t gain_code) const
double CamHelperImx519::gain(uint32_t gainCode) const
{
return 1024.0 / (1024 - gain_code);
return 1024.0 / (1024 - gainCode);
}
void CamHelperImx519::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
void CamHelperImx519::prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
{
MdParser::RegisterMap registers;
DeviceStatus deviceStatus;
if (metadata.Get("device.status", deviceStatus)) {
if (metadata.get("device.status", deviceStatus)) {
LOG(IPARPI, Error) << "DeviceStatus not found from DelayedControls";
return;
}
@@ -105,27 +105,27 @@ void CamHelperImx519::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &m
* Otherwise, all values are updated with what is reported in the
* embedded data.
*/
if (deviceStatus.frame_length > frameLengthMax) {
if (deviceStatus.frameLength > frameLengthMax) {
DeviceStatus parsedDeviceStatus;
metadata.Get("device.status", parsedDeviceStatus);
parsedDeviceStatus.shutter_speed = deviceStatus.shutter_speed;
parsedDeviceStatus.frame_length = deviceStatus.frame_length;
metadata.Set("device.status", parsedDeviceStatus);
metadata.get("device.status", parsedDeviceStatus);
parsedDeviceStatus.shutterSpeed = deviceStatus.shutterSpeed;
parsedDeviceStatus.frameLength = deviceStatus.frameLength;
metadata.set("device.status", parsedDeviceStatus);
LOG(IPARPI, Debug) << "Metadata updated for long exposure: "
<< parsedDeviceStatus;
}
}
uint32_t CamHelperImx519::GetVBlanking(Duration &exposure,
uint32_t CamHelperImx519::getVBlanking(Duration &exposure,
Duration minFrameDuration,
Duration maxFrameDuration) const
{
uint32_t frameLength, exposureLines;
unsigned int shift = 0;
frameLength = mode_.height + CamHelper::GetVBlanking(exposure, minFrameDuration,
frameLength = mode_.height + CamHelper::getVBlanking(exposure, minFrameDuration,
maxFrameDuration);
/*
* Check if the frame length calculated needs to be setup for long
@@ -144,42 +144,42 @@ uint32_t CamHelperImx519::GetVBlanking(Duration &exposure,
if (shift) {
/* Account for any rounding in the scaled frame length value. */
frameLength <<= shift;
exposureLines = ExposureLines(exposure);
exposureLines = CamHelperImx519::exposureLines(exposure);
exposureLines = std::min(exposureLines, frameLength - frameIntegrationDiff);
exposure = Exposure(exposureLines);
exposure = CamHelperImx519::exposure(exposureLines);
}
return frameLength - mode_.height;
}
void CamHelperImx519::GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const
void CamHelperImx519::getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const
{
exposure_delay = 2;
gain_delay = 2;
vblank_delay = 3;
exposureDelay = 2;
gainDelay = 2;
vblankDelay = 3;
}
bool CamHelperImx519::SensorEmbeddedDataPresent() const
bool CamHelperImx519::sensorEmbeddedDataPresent() const
{
return true;
}
void CamHelperImx519::PopulateMetadata(const MdParser::RegisterMap &registers,
void CamHelperImx519::populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const
{
DeviceStatus deviceStatus;
deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
deviceStatus.analogue_gain = Gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
deviceStatus.shutterSpeed = exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
deviceStatus.analogueGain = gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
deviceStatus.frameLength = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
metadata.Set("device.status", deviceStatus);
metadata.set("device.status", deviceStatus);
}
static CamHelper *Create()
static CamHelper *create()
{
return new CamHelperImx519();
}
static RegisterCamHelper reg("imx519", &Create);
static RegisterCamHelper reg("imx519", &create);

View file

@@ -15,14 +15,14 @@ class CamHelperOv5647 : public CamHelper
{
public:
CamHelperOv5647();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
void GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const override;
unsigned int HideFramesStartup() const override;
unsigned int HideFramesModeSwitch() const override;
unsigned int MistrustFramesStartup() const override;
unsigned int MistrustFramesModeSwitch() const override;
uint32_t gainCode(double gain) const override;
double gain(uint32_t gainCode) const override;
void getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const override;
unsigned int hideFramesStartup() const override;
unsigned int hideFramesModeSwitch() const override;
unsigned int mistrustFramesStartup() const override;
unsigned int mistrustFramesModeSwitch() const override;
private:
/*
@@ -42,29 +42,29 @@ CamHelperOv5647::CamHelperOv5647()
{
}
uint32_t CamHelperOv5647::GainCode(double gain) const
uint32_t CamHelperOv5647::gainCode(double gain) const
{
return static_cast<uint32_t>(gain * 16.0);
}
double CamHelperOv5647::Gain(uint32_t gain_code) const
double CamHelperOv5647::gain(uint32_t gainCode) const
{
return static_cast<double>(gain_code) / 16.0;
return static_cast<double>(gainCode) / 16.0;
}
void CamHelperOv5647::GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const
void CamHelperOv5647::getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const
{
/*
* We run this sensor in a mode where the gain delay is bumped up to
* 2. It seems to be the only way to make the delays "predictable".
*/
exposure_delay = 2;
gain_delay = 2;
vblank_delay = 2;
exposureDelay = 2;
gainDelay = 2;
vblankDelay = 2;
}
unsigned int CamHelperOv5647::HideFramesStartup() const
unsigned int CamHelperOv5647::hideFramesStartup() const
{
/*
* On startup, we get a couple of under-exposed frames which
@@ -73,7 +73,7 @@ unsigned int CamHelperOv5647::HideFramesStartup() const
return 2;
}
unsigned int CamHelperOv5647::HideFramesModeSwitch() const
unsigned int CamHelperOv5647::hideFramesModeSwitch() const
{
/*
* After a mode switch, we get a couple of under-exposed frames which
@@ -82,7 +82,7 @@ unsigned int CamHelperOv5647::HideFramesModeSwitch() const
return 2;
}
unsigned int CamHelperOv5647::MistrustFramesStartup() const
unsigned int CamHelperOv5647::mistrustFramesStartup() const
{
/*
* First couple of frames are under-exposed and are no good for control
@@ -91,7 +91,7 @@ unsigned int CamHelperOv5647::MistrustFramesStartup() const
return 2;
}
unsigned int CamHelperOv5647::MistrustFramesModeSwitch() const
unsigned int CamHelperOv5647::mistrustFramesModeSwitch() const
{
/*
* First couple of frames are under-exposed even after a simple
@@ -100,9 +100,9 @@ unsigned int CamHelperOv5647::MistrustFramesModeSwitch() const
return 2;
}
static CamHelper *Create()
static CamHelper *create()
{
return new CamHelperOv5647();
}
static RegisterCamHelper reg("ov5647", &Create);
static RegisterCamHelper reg("ov5647", &create);

View file

@@ -15,10 +15,10 @@ class CamHelperOv9281 : public CamHelper
{
public:
CamHelperOv9281();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
void GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const override;
uint32_t gainCode(double gain) const override;
double gain(uint32_t gainCode) const override;
void getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const override;
private:
/*
@@ -38,28 +38,28 @@ CamHelperOv9281::CamHelperOv9281()
{
}
uint32_t CamHelperOv9281::GainCode(double gain) const
uint32_t CamHelperOv9281::gainCode(double gain) const
{
return static_cast<uint32_t>(gain * 16.0);
}
double CamHelperOv9281::Gain(uint32_t gain_code) const
double CamHelperOv9281::gain(uint32_t gainCode) const
{
return static_cast<double>(gain_code) / 16.0;
return static_cast<double>(gainCode) / 16.0;
}
void CamHelperOv9281::GetDelays(int &exposure_delay, int &gain_delay,
int &vblank_delay) const
void CamHelperOv9281::getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay) const
{
/* The driver appears to behave as follows: */
exposure_delay = 2;
gain_delay = 2;
vblank_delay = 2;
exposureDelay = 2;
gainDelay = 2;
vblankDelay = 2;
}
static CamHelper *Create()
static CamHelper *create()
{
return new CamHelperOv9281();
}
static RegisterCamHelper reg("ov9281", &Create);
static RegisterCamHelper reg("ov9281", &create);

View file

@@ -17,16 +17,15 @@ class AgcAlgorithm : public Algorithm
public:
AgcAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AGC algorithm must provide the following:
virtual unsigned int GetConvergenceFrames() const = 0;
virtual void SetEv(double ev) = 0;
virtual void SetFlickerPeriod(libcamera::utils::Duration flicker_period) = 0;
virtual void SetFixedShutter(libcamera::utils::Duration fixed_shutter) = 0;
virtual void SetMaxShutter(libcamera::utils::Duration max_shutter) = 0;
virtual void SetFixedAnalogueGain(double fixed_analogue_gain) = 0;
virtual void SetMeteringMode(std::string const &metering_mode_name) = 0;
virtual void SetExposureMode(std::string const &exposure_mode_name) = 0;
virtual void
SetConstraintMode(std::string const &contraint_mode_name) = 0;
virtual unsigned int getConvergenceFrames() const = 0;
virtual void setEv(double ev) = 0;
virtual void setFlickerPeriod(libcamera::utils::Duration flickerPeriod) = 0;
virtual void setFixedShutter(libcamera::utils::Duration fixedShutter) = 0;
virtual void setMaxShutter(libcamera::utils::Duration maxShutter) = 0;
virtual void setFixedAnalogueGain(double fixedAnalogueGain) = 0;
virtual void setMeteringMode(std::string const &meteringModeName) = 0;
virtual void setExposureMode(std::string const &exposureModeName) = 0;
virtual void setConstraintMode(std::string const &contraintModeName) = 0;
};
} // namespace RPiController

View file

@ -20,19 +20,19 @@ extern "C" {
// ignored until then.
struct AgcStatus {
libcamera::utils::Duration total_exposure_value; // value for all exposure and gain for this image
libcamera::utils::Duration target_exposure_value; // (unfiltered) target total exposure AGC is aiming for
libcamera::utils::Duration shutter_time;
double analogue_gain;
char exposure_mode[32];
char constraint_mode[32];
char metering_mode[32];
libcamera::utils::Duration totalExposureValue; // value for all exposure and gain for this image
libcamera::utils::Duration targetExposureValue; // (unfiltered) target total exposure AGC is aiming for
libcamera::utils::Duration shutterTime;
double analogueGain;
char exposureMode[32];
char constraintMode[32];
char meteringMode[32];
double ev;
libcamera::utils::Duration flicker_period;
int floating_region_enable;
libcamera::utils::Duration fixed_shutter;
double fixed_analogue_gain;
double digital_gain;
libcamera::utils::Duration flickerPeriod;
int floatingRegionEnable;
libcamera::utils::Duration fixedShutter;
double fixedAnalogueGain;
double digitalGain;
int locked;
};

View file

@ -9,36 +9,38 @@
using namespace RPiController;
void Algorithm::Read([[maybe_unused]] boost::property_tree::ptree const &params)
void Algorithm::read([[maybe_unused]] boost::property_tree::ptree const &params)
{
}
void Algorithm::Initialise() {}
void Algorithm::initialise()
{
}
void Algorithm::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
void Algorithm::switchMode([[maybe_unused]] CameraMode const &cameraMode,
[[maybe_unused]] Metadata *metadata)
{
}
void Algorithm::Prepare([[maybe_unused]] Metadata *image_metadata)
void Algorithm::prepare([[maybe_unused]] Metadata *imageMetadata)
{
}
void Algorithm::Process([[maybe_unused]] StatisticsPtr &stats,
[[maybe_unused]] Metadata *image_metadata)
void Algorithm::process([[maybe_unused]] StatisticsPtr &stats,
[[maybe_unused]] Metadata *imageMetadata)
{
}
// For registering algorithms with the system:
static std::map<std::string, AlgoCreateFunc> algorithms;
std::map<std::string, AlgoCreateFunc> const &RPiController::GetAlgorithms()
std::map<std::string, AlgoCreateFunc> const &RPiController::getAlgorithms()
{
return algorithms;
}
RegisterAlgorithm::RegisterAlgorithm(char const *name,
AlgoCreateFunc create_func)
AlgoCreateFunc createFunc)
{
algorithms[std::string(name)] = create_func;
algorithms[std::string(name)] = createFunc;
}

View file

@ -29,18 +29,18 @@ public:
{
}
virtual ~Algorithm() = default;
virtual char const *Name() const = 0;
virtual bool IsPaused() const { return paused_; }
virtual void Pause() { paused_ = true; }
virtual void Resume() { paused_ = false; }
virtual void Read(boost::property_tree::ptree const &params);
virtual void Initialise();
virtual void SwitchMode(CameraMode const &camera_mode, Metadata *metadata);
virtual void Prepare(Metadata *image_metadata);
virtual void Process(StatisticsPtr &stats, Metadata *image_metadata);
Metadata &GetGlobalMetadata() const
virtual char const *name() const = 0;
virtual bool isPaused() const { return paused_; }
virtual void pause() { paused_ = true; }
virtual void resume() { paused_ = false; }
virtual void read(boost::property_tree::ptree const &params);
virtual void initialise();
virtual void switchMode(CameraMode const &cameraMode, Metadata *metadata);
virtual void prepare(Metadata *imageMetadata);
virtual void process(StatisticsPtr &stats, Metadata *imageMetadata);
Metadata &getGlobalMetadata() const
{
return controller_->GetGlobalMetadata();
return controller_->getGlobalMetadata();
}
private:
@ -53,8 +53,8 @@ private:
typedef Algorithm *(*AlgoCreateFunc)(Controller *controller);
struct RegisterAlgorithm {
RegisterAlgorithm(char const *name, AlgoCreateFunc create_func);
RegisterAlgorithm(char const *name, AlgoCreateFunc createFunc);
};
std::map<std::string, AlgoCreateFunc> const &GetAlgorithms();
std::map<std::string, AlgoCreateFunc> const &getAlgorithms();
} // namespace RPiController

View file

@ -15,9 +15,9 @@ class AwbAlgorithm : public Algorithm
public:
AwbAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AWB algorithm must provide the following:
virtual unsigned int GetConvergenceFrames() const = 0;
virtual void SetMode(std::string const &mode_name) = 0;
virtual void SetManualGains(double manual_r, double manual_b) = 0;
virtual unsigned int getConvergenceFrames() const = 0;
virtual void setMode(std::string const &modeName) = 0;
virtual void setManualGains(double manualR, double manualB) = 0;
};
} // namespace RPiController

View file

@ -15,10 +15,10 @@ extern "C" {
struct AwbStatus {
char mode[32];
double temperature_K;
double gain_r;
double gain_g;
double gain_b;
double temperatureK;
double gainR;
double gainG;
double gainB;
};
#ifdef __cplusplus

View file

@ -13,9 +13,9 @@ extern "C" {
#endif
struct BlackLevelStatus {
uint16_t black_level_r; // out of 16 bits
uint16_t black_level_g;
uint16_t black_level_b;
uint16_t blackLevelR; // out of 16 bits
uint16_t blackLevelG;
uint16_t blackLevelB;
};
#ifdef __cplusplus

View file

@ -26,21 +26,21 @@ struct CameraMode {
// size in pixels of frames in this mode
uint16_t width, height;
// size of full resolution uncropped frame ("sensor frame")
uint16_t sensor_width, sensor_height;
uint16_t sensorWidth, sensorHeight;
// binning factor (1 = no binning, 2 = 2-pixel binning etc.)
uint8_t bin_x, bin_y;
uint8_t binX, binY;
// location of top left pixel in the sensor frame
uint16_t crop_x, crop_y;
// scaling factor (so if uncropped, width*scale_x is sensor_width)
double scale_x, scale_y;
uint16_t cropX, cropY;
// scaling factor (so if uncropped, width*scaleX is sensorWidth)
double scaleX, scaleY;
// scaling of the noise compared to the native sensor mode
double noise_factor;
double noiseFactor;
// line time
libcamera::utils::Duration line_length;
libcamera::utils::Duration lineLength;
// any camera transform *not* reflected already in the camera tuning
libcamera::Transform transform;
// minimum and maximum fame lengths in units of lines
uint32_t min_frame_length, max_frame_length;
uint32_t minFrameLength, maxFrameLength;
// sensitivity of this mode
double sensitivity;
};

View file

@ -15,7 +15,7 @@ class CcmAlgorithm : public Algorithm
public:
CcmAlgorithm(Controller *controller) : Algorithm(controller) {}
// A CCM algorithm must provide the following:
virtual void SetSaturation(double saturation) = 0;
virtual void setSaturation(double saturation) = 0;
};
} // namespace RPiController

View file

@ -15,8 +15,8 @@ class ContrastAlgorithm : public Algorithm
public:
ContrastAlgorithm(Controller *controller) : Algorithm(controller) {}
// A contrast algorithm must provide the following:
virtual void SetBrightness(double brightness) = 0;
virtual void SetContrast(double contrast) = 0;
virtual void setBrightness(double brightness) = 0;
virtual void setContrast(double contrast) = 0;
};
} // namespace RPiController

View file

@ -19,85 +19,87 @@ using namespace libcamera;
LOG_DEFINE_CATEGORY(RPiController)
Controller::Controller()
: switch_mode_called_(false) {}
Controller::Controller(char const *json_filename)
: switch_mode_called_(false)
: switchModeCalled_(false)
{
Read(json_filename);
Initialise();
}
Controller::Controller(char const *jsonFilename)
: switchModeCalled_(false)
{
read(jsonFilename);
initialise();
}
Controller::~Controller() {}
void Controller::Read(char const *filename)
void Controller::read(char const *filename)
{
boost::property_tree::ptree root;
boost::property_tree::read_json(filename, root);
for (auto const &key_and_value : root) {
Algorithm *algo = CreateAlgorithm(key_and_value.first.c_str());
for (auto const &keyAndValue : root) {
Algorithm *algo = createAlgorithm(keyAndValue.first.c_str());
if (algo) {
algo->Read(key_and_value.second);
algo->read(keyAndValue.second);
algorithms_.push_back(AlgorithmPtr(algo));
} else
LOG(RPiController, Warning)
<< "No algorithm found for \"" << key_and_value.first << "\"";
<< "No algorithm found for \"" << keyAndValue.first << "\"";
}
}
Algorithm *Controller::CreateAlgorithm(char const *name)
Algorithm *Controller::createAlgorithm(char const *name)
{
auto it = GetAlgorithms().find(std::string(name));
return it != GetAlgorithms().end() ? (*it->second)(this) : nullptr;
auto it = getAlgorithms().find(std::string(name));
return it != getAlgorithms().end() ? (*it->second)(this) : nullptr;
}
void Controller::Initialise()
void Controller::initialise()
{
for (auto &algo : algorithms_)
algo->Initialise();
algo->initialise();
}
void Controller::SwitchMode(CameraMode const &camera_mode, Metadata *metadata)
void Controller::switchMode(CameraMode const &cameraMode, Metadata *metadata)
{
for (auto &algo : algorithms_)
algo->SwitchMode(camera_mode, metadata);
switch_mode_called_ = true;
algo->switchMode(cameraMode, metadata);
switchModeCalled_ = true;
}
void Controller::Prepare(Metadata *image_metadata)
void Controller::prepare(Metadata *imageMetadata)
{
assert(switch_mode_called_);
assert(switchModeCalled_);
for (auto &algo : algorithms_)
if (!algo->IsPaused())
algo->Prepare(image_metadata);
if (!algo->isPaused())
algo->prepare(imageMetadata);
}
void Controller::Process(StatisticsPtr stats, Metadata *image_metadata)
void Controller::process(StatisticsPtr stats, Metadata *imageMetadata)
{
assert(switch_mode_called_);
assert(switchModeCalled_);
for (auto &algo : algorithms_)
if (!algo->IsPaused())
algo->Process(stats, image_metadata);
if (!algo->isPaused())
algo->process(stats, imageMetadata);
}
Metadata &Controller::GetGlobalMetadata()
Metadata &Controller::getGlobalMetadata()
{
return global_metadata_;
return globalMetadata_;
}
Algorithm *Controller::GetAlgorithm(std::string const &name) const
Algorithm *Controller::getAlgorithm(std::string const &name) const
{
// The passed name must be the entire algorithm name, or must match the
// last part of it with a period (.) just before.
size_t name_len = name.length();
size_t nameLen = name.length();
for (auto &algo : algorithms_) {
char const *algo_name = algo->Name();
size_t algo_name_len = strlen(algo_name);
if (algo_name_len >= name_len &&
char const *algoName = algo->name();
size_t algoNameLen = strlen(algoName);
if (algoNameLen >= nameLen &&
strcasecmp(name.c_str(),
algo_name + algo_name_len - name_len) == 0 &&
(name_len == algo_name_len ||
algo_name[algo_name_len - name_len - 1] == '.'))
algoName + algoNameLen - nameLen) == 0 &&
(nameLen == algoNameLen ||
algoName[algoNameLen - nameLen - 1] == '.'))
return algo.get();
}
return nullptr;

View file

@ -34,21 +34,21 @@ class Controller
{
public:
Controller();
Controller(char const *json_filename);
Controller(char const *jsonFilename);
~Controller();
Algorithm *CreateAlgorithm(char const *name);
void Read(char const *filename);
void Initialise();
void SwitchMode(CameraMode const &camera_mode, Metadata *metadata);
void Prepare(Metadata *image_metadata);
void Process(StatisticsPtr stats, Metadata *image_metadata);
Metadata &GetGlobalMetadata();
Algorithm *GetAlgorithm(std::string const &name) const;
Algorithm *createAlgorithm(char const *name);
void read(char const *filename);
void initialise();
void switchMode(CameraMode const &cameraMode, Metadata *metadata);
void prepare(Metadata *imageMetadata);
void process(StatisticsPtr stats, Metadata *imageMetadata);
Metadata &getGlobalMetadata();
Algorithm *getAlgorithm(std::string const &name) const;
protected:
Metadata global_metadata_;
Metadata globalMetadata_;
std::vector<AlgorithmPtr> algorithms_;
bool switch_mode_called_;
bool switchModeCalled_;
};
} // namespace RPiController

View file

@ -17,7 +17,7 @@ class DenoiseAlgorithm : public Algorithm
public:
DenoiseAlgorithm(Controller *controller) : Algorithm(controller) {}
// A Denoise algorithm must provide the following:
virtual void SetMode(DenoiseMode mode) = 0;
virtual void setMode(DenoiseMode mode) = 0;
};
} // namespace RPiController

View file

@ -13,8 +13,8 @@ extern "C" {
#endif
struct DenoiseStatus {
double noise_constant;
double noise_slope;
double noiseConstant;
double noiseSlope;
double strength;
unsigned int mode;
};

View file

@ -10,21 +10,21 @@ using namespace libcamera; /* for the Duration operator<< overload */
std::ostream &operator<<(std::ostream &out, const DeviceStatus &d)
{
out << "Exposure: " << d.shutter_speed
<< " Frame length: " << d.frame_length
<< " Gain: " << d.analogue_gain;
out << "Exposure: " << d.shutterSpeed
<< " Frame length: " << d.frameLength
<< " Gain: " << d.analogueGain;
if (d.aperture)
out << " Aperture: " << *d.aperture;
if (d.lens_position)
out << " Lens: " << *d.lens_position;
if (d.lensPosition)
out << " Lens: " << *d.lensPosition;
if (d.flash_intensity)
out << " Flash: " << *d.flash_intensity;
if (d.flashIntensity)
out << " Flash: " << *d.flashIntensity;
if (d.sensor_temperature)
out << " Temperature: " << *d.sensor_temperature;
if (d.sensorTemperature)
out << " Temperature: " << *d.sensorTemperature;
return out;
}

View file

@ -18,24 +18,24 @@
struct DeviceStatus {
DeviceStatus()
: shutter_speed(std::chrono::seconds(0)), frame_length(0),
analogue_gain(0.0)
: shutterSpeed(std::chrono::seconds(0)), frameLength(0),
analogueGain(0.0)
{
}
friend std::ostream &operator<<(std::ostream &out, const DeviceStatus &d);
/* time shutter is open */
libcamera::utils::Duration shutter_speed;
libcamera::utils::Duration shutterSpeed;
/* frame length given in number of lines */
uint32_t frame_length;
double analogue_gain;
uint32_t frameLength;
double analogueGain;
/* 1.0/distance-in-metres, or 0 if unknown */
std::optional<double> lens_position;
std::optional<double> lensPosition;
/* 1/f so that brightness quadruples when this doubles, or 0 if unknown */
std::optional<double> aperture;
/* proportional to brightness with 0 = no flash, 1 = maximum flash */
std::optional<double> flash_intensity;
std::optional<double> flashIntensity;
/* Sensor reported temperature value (in degrees) */
std::optional<double> sensor_temperature;
std::optional<double> sensorTemperature;
};

View file

@ -18,7 +18,7 @@ extern "C" {
struct FocusStatus {
unsigned int num;
uint32_t focus_measures[FOCUS_REGIONS];
uint32_t focusMeasures[FOCUS_REGIONS];
};
#ifdef __cplusplus

View file

@ -11,25 +11,25 @@
using namespace RPiController;
uint64_t Histogram::CumulativeFreq(double bin) const
uint64_t Histogram::cumulativeFreq(double bin) const
{
if (bin <= 0)
return 0;
else if (bin >= Bins())
return Total();
else if (bin >= bins())
return total();
int b = (int)bin;
return cumulative_[b] +
(bin - b) * (cumulative_[b + 1] - cumulative_[b]);
}
double Histogram::Quantile(double q, int first, int last) const
double Histogram::quantile(double q, int first, int last) const
{
if (first == -1)
first = 0;
if (last == -1)
last = cumulative_.size() - 2;
assert(first <= last);
uint64_t items = q * Total();
uint64_t items = q * total();
while (first < last) // binary search to find the right bin
{
int middle = (first + last) / 2;
@ -45,20 +45,20 @@ double Histogram::Quantile(double q, int first, int last) const
return first + frac;
}
double Histogram::InterQuantileMean(double q_lo, double q_hi) const
double Histogram::interQuantileMean(double qLo, double qHi) const
{
assert(q_hi > q_lo);
double p_lo = Quantile(q_lo);
double p_hi = Quantile(q_hi, (int)p_lo);
double sum_bin_freq = 0, cumul_freq = 0;
for (double p_next = floor(p_lo) + 1.0; p_next <= ceil(p_hi);
p_lo = p_next, p_next += 1.0) {
int bin = floor(p_lo);
assert(qHi > qLo);
double pLo = quantile(qLo);
double pHi = quantile(qHi, (int)pLo);
double sumBinFreq = 0, cumulFreq = 0;
for (double pNext = floor(pLo) + 1.0; pNext <= ceil(pHi);
pLo = pNext, pNext += 1.0) {
int bin = floor(pLo);
double freq = (cumulative_[bin + 1] - cumulative_[bin]) *
(std::min(p_next, p_hi) - p_lo);
sum_bin_freq += bin * freq;
cumul_freq += freq;
(std::min(pNext, pHi) - pLo);
sumBinFreq += bin * freq;
cumulFreq += freq;
}
// add 0.5 to give an average for bin mid-points
return sum_bin_freq / cumul_freq + 0.5;
return sumBinFreq / cumulFreq + 0.5;
}

View file

@ -27,15 +27,15 @@ public:
cumulative_.push_back(cumulative_.back() +
histogram[i]);
}
uint32_t Bins() const { return cumulative_.size() - 1; }
uint64_t Total() const { return cumulative_[cumulative_.size() - 1]; }
uint32_t bins() const { return cumulative_.size() - 1; }
uint64_t total() const { return cumulative_[cumulative_.size() - 1]; }
// Cumulative frequency up to a (fractional) point in a bin.
uint64_t CumulativeFreq(double bin) const;
uint64_t cumulativeFreq(double bin) const;
// Return the (fractional) bin of the point q (0 <= q <= 1) through the
// histogram. Optionally provide limits to help.
double Quantile(double q, int first = -1, int last = -1) const;
double quantile(double q, int first = -1, int last = -1) const;
// Return the average histogram bin value between the two quantiles.
double InterQuantileMean(double q_lo, double q_hi) const;
double interQuantileMean(double qLo, double qHi) const;
private:
std::vector<uint64_t> cumulative_;

View file

@ -22,26 +22,26 @@ public:
Metadata(Metadata const &other)
{
std::scoped_lock other_lock(other.mutex_);
std::scoped_lock otherLock(other.mutex_);
data_ = other.data_;
}
Metadata(Metadata &&other)
{
std::scoped_lock other_lock(other.mutex_);
std::scoped_lock otherLock(other.mutex_);
data_ = std::move(other.data_);
other.data_.clear();
}
template<typename T>
void Set(std::string const &tag, T const &value)
void set(std::string const &tag, T const &value)
{
std::scoped_lock lock(mutex_);
data_[tag] = value;
}
template<typename T>
int Get(std::string const &tag, T &value) const
int get(std::string const &tag, T &value) const
{
std::scoped_lock lock(mutex_);
auto it = data_.find(tag);
@ -51,7 +51,7 @@ public:
return 0;
}
void Clear()
void clear()
{
std::scoped_lock lock(mutex_);
data_.clear();
@ -72,14 +72,14 @@ public:
return *this;
}
void Merge(Metadata &other)
void merge(Metadata &other)
{
std::scoped_lock lock(mutex_, other.mutex_);
data_.merge(other.data_);
}
template<typename T>
T *GetLocked(std::string const &tag)
T *getLocked(std::string const &tag)
{
// This allows in-place access to the Metadata contents,
// for which you should be holding the lock.
@ -90,7 +90,7 @@ public:
}
template<typename T>
void SetLocked(std::string const &tag, T const &value)
void setLocked(std::string const &tag, T const &value)
{
// Use this only if you're holding the lock yourself.
data_[tag] = value;

View file

@ -13,8 +13,8 @@ extern "C" {
#endif
struct NoiseStatus {
double noise_constant;
double noise_slope;
double noiseConstant;
double noiseSlope;
};
#ifdef __cplusplus

View file

@ -12,7 +12,7 @@
using namespace RPiController;
void Pwl::Read(boost::property_tree::ptree const &params)
void Pwl::read(boost::property_tree::ptree const &params)
{
for (auto it = params.begin(); it != params.end(); it++) {
double x = it->second.get_value<double>();
@ -24,24 +24,24 @@ void Pwl::Read(boost::property_tree::ptree const &params)
assert(points_.size() >= 2);
}
void Pwl::Append(double x, double y, const double eps)
void Pwl::append(double x, double y, const double eps)
{
if (points_.empty() || points_.back().x + eps < x)
points_.push_back(Point(x, y));
}
void Pwl::Prepend(double x, double y, const double eps)
void Pwl::prepend(double x, double y, const double eps)
{
if (points_.empty() || points_.front().x - eps > x)
points_.insert(points_.begin(), Point(x, y));
}
Pwl::Interval Pwl::Domain() const
Pwl::Interval Pwl::domain() const
{
return Interval(points_[0].x, points_[points_.size() - 1].x);
}
Pwl::Interval Pwl::Range() const
Pwl::Interval Pwl::range() const
{
double lo = points_[0].y, hi = lo;
for (auto &p : points_)
@ -49,18 +49,16 @@ Pwl::Interval Pwl::Range() const
return Interval(lo, hi);
}
bool Pwl::Empty() const
bool Pwl::empty() const
{
return points_.empty();
}
double Pwl::Eval(double x, int *span_ptr, bool update_span) const
double Pwl::eval(double x, int *spanPtr, bool updateSpan) const
{
int span = findSpan(x, span_ptr && *span_ptr != -1
? *span_ptr
: points_.size() / 2 - 1);
if (span_ptr && update_span)
*span_ptr = span;
int span = findSpan(x, spanPtr && *spanPtr != -1 ? *spanPtr : points_.size() / 2 - 1);
if (spanPtr && updateSpan)
*spanPtr = span;
return points_[span].y +
(x - points_[span].x) * (points_[span + 1].y - points_[span].y) /
(points_[span + 1].x - points_[span].x);
@ -70,31 +68,31 @@ int Pwl::findSpan(double x, int span) const
{
// Pwls are generally small, so linear search may well be faster than
// binary, though could review this if large PWls start turning up.
int last_span = points_.size() - 2;
int lastSpan = points_.size() - 2;
// some algorithms may call us with span pointing directly at the last
// control point
span = std::max(0, std::min(last_span, span));
while (span < last_span && x >= points_[span + 1].x)
span = std::max(0, std::min(lastSpan, span));
while (span < lastSpan && x >= points_[span + 1].x)
span++;
while (span && x < points_[span].x)
span--;
return span;
}
Pwl::PerpType Pwl::Invert(Point const &xy, Point &perp, int &span,
Pwl::PerpType Pwl::invert(Point const &xy, Point &perp, int &span,
const double eps) const
{
assert(span >= -1);
bool prev_off_end = false;
bool prevOffEnd = false;
for (span = span + 1; span < (int)points_.size() - 1; span++) {
Point span_vec = points_[span + 1] - points_[span];
double t = ((xy - points_[span]) % span_vec) / span_vec.Len2();
Point spanVec = points_[span + 1] - points_[span];
double t = ((xy - points_[span]) % spanVec) / spanVec.len2();
if (t < -eps) // off the start of this span
{
if (span == 0) {
perp = points_[span];
return PerpType::Start;
} else if (prev_off_end) {
} else if (prevOffEnd) {
perp = points_[span];
return PerpType::Vertex;
}
@ -104,32 +102,32 @@ Pwl::PerpType Pwl::Invert(Point const &xy, Point &perp, int &span,
perp = points_[span + 1];
return PerpType::End;
}
prev_off_end = true;
prevOffEnd = true;
} else // a true perpendicular
{
perp = points_[span] + span_vec * t;
perp = points_[span] + spanVec * t;
return PerpType::Perpendicular;
}
}
return PerpType::None;
}
Pwl Pwl::Inverse(bool *true_inverse, const double eps) const
Pwl Pwl::inverse(bool *trueInverse, const double eps) const
{
bool appended = false, prepended = false, neither = false;
Pwl inverse;
for (Point const &p : points_) {
if (inverse.Empty())
inverse.Append(p.y, p.x, eps);
if (inverse.empty())
inverse.append(p.y, p.x, eps);
else if (std::abs(inverse.points_.back().x - p.y) <= eps ||
std::abs(inverse.points_.front().x - p.y) <= eps)
/* do nothing */;
else if (p.y > inverse.points_.back().x) {
inverse.Append(p.y, p.x, eps);
inverse.append(p.y, p.x, eps);
appended = true;
} else if (p.y < inverse.points_.front().x) {
inverse.Prepend(p.y, p.x, eps);
inverse.prepend(p.y, p.x, eps);
prepended = true;
} else
neither = true;
@ -138,63 +136,65 @@ Pwl Pwl::Inverse(bool *true_inverse, const double eps) const
// This is not a proper inverse if we found ourselves putting points
// onto both ends of the inverse, or if there were points that couldn't
// go on either.
if (true_inverse)
*true_inverse = !(neither || (appended && prepended));
if (trueInverse)
*trueInverse = !(neither || (appended && prepended));
return inverse;
}
Pwl Pwl::Compose(Pwl const &other, const double eps) const
Pwl Pwl::compose(Pwl const &other, const double eps) const
{
double this_x = points_[0].x, this_y = points_[0].y;
int this_span = 0, other_span = other.findSpan(this_y, 0);
Pwl result({ { this_x, other.Eval(this_y, &other_span, false) } });
while (this_span != (int)points_.size() - 1) {
double dx = points_[this_span + 1].x - points_[this_span].x,
dy = points_[this_span + 1].y - points_[this_span].y;
double thisX = points_[0].x, thisY = points_[0].y;
int thisSpan = 0, otherSpan = other.findSpan(thisY, 0);
Pwl result({ { thisX, other.eval(thisY, &otherSpan, false) } });
while (thisSpan != (int)points_.size() - 1) {
double dx = points_[thisSpan + 1].x - points_[thisSpan].x,
dy = points_[thisSpan + 1].y - points_[thisSpan].y;
if (abs(dy) > eps &&
other_span + 1 < (int)other.points_.size() &&
points_[this_span + 1].y >=
other.points_[other_span + 1].x + eps) {
otherSpan + 1 < (int)other.points_.size() &&
points_[thisSpan + 1].y >=
other.points_[otherSpan + 1].x + eps) {
// next control point in result will be where this
// function's y reaches the next span in other
this_x = points_[this_span].x +
(other.points_[other_span + 1].x -
points_[this_span].y) * dx / dy;
this_y = other.points_[++other_span].x;
} else if (abs(dy) > eps && other_span > 0 &&
points_[this_span + 1].y <=
other.points_[other_span - 1].x - eps) {
thisX = points_[thisSpan].x +
(other.points_[otherSpan + 1].x -
points_[thisSpan].y) *
dx / dy;
thisY = other.points_[++otherSpan].x;
} else if (abs(dy) > eps && otherSpan > 0 &&
points_[thisSpan + 1].y <=
other.points_[otherSpan - 1].x - eps) {
// next control point in result will be where this
// function's y reaches the previous span in other
this_x = points_[this_span].x +
(other.points_[other_span + 1].x -
points_[this_span].y) * dx / dy;
this_y = other.points_[--other_span].x;
thisX = points_[thisSpan].x +
(other.points_[otherSpan + 1].x -
points_[thisSpan].y) *
dx / dy;
thisY = other.points_[--otherSpan].x;
} else {
// we stay in the same span in other
this_span++;
this_x = points_[this_span].x,
this_y = points_[this_span].y;
thisSpan++;
thisX = points_[thisSpan].x,
thisY = points_[thisSpan].y;
}
result.Append(this_x, other.Eval(this_y, &other_span, false),
result.append(thisX, other.eval(thisY, &otherSpan, false),
eps);
}
return result;
}
void Pwl::Map(std::function<void(double x, double y)> f) const
void Pwl::map(std::function<void(double x, double y)> f) const
{
for (auto &pt : points_)
f(pt.x, pt.y);
}
void Pwl::Map2(Pwl const &pwl0, Pwl const &pwl1,
void Pwl::map2(Pwl const &pwl0, Pwl const &pwl1,
std::function<void(double x, double y0, double y1)> f)
{
int span0 = 0, span1 = 0;
double x = std::min(pwl0.points_[0].x, pwl1.points_[0].x);
f(x, pwl0.Eval(x, &span0, false), pwl1.Eval(x, &span1, false));
f(x, pwl0.eval(x, &span0, false), pwl1.eval(x, &span1, false));
while (span0 < (int)pwl0.points_.size() - 1 ||
span1 < (int)pwl1.points_.size() - 1) {
if (span0 == (int)pwl0.points_.size() - 1)
@ -205,28 +205,28 @@ void Pwl::Map2(Pwl const &pwl0, Pwl const &pwl1,
x = pwl1.points_[++span1].x;
else
x = pwl0.points_[++span0].x;
f(x, pwl0.Eval(x, &span0, false), pwl1.Eval(x, &span1, false));
f(x, pwl0.eval(x, &span0, false), pwl1.eval(x, &span1, false));
}
}
Pwl Pwl::Combine(Pwl const &pwl0, Pwl const &pwl1,
Pwl Pwl::combine(Pwl const &pwl0, Pwl const &pwl1,
std::function<double(double x, double y0, double y1)> f,
const double eps)
{
Pwl result;
Map2(pwl0, pwl1, [&](double x, double y0, double y1) {
result.Append(x, f(x, y0, y1), eps);
map2(pwl0, pwl1, [&](double x, double y0, double y1) {
result.append(x, f(x, y0, y1), eps);
});
return result;
}
void Pwl::MatchDomain(Interval const &domain, bool clip, const double eps)
void Pwl::matchDomain(Interval const &domain, bool clip, const double eps)
{
int span = 0;
Prepend(domain.start, Eval(clip ? points_[0].x : domain.start, &span),
prepend(domain.start, eval(clip ? points_[0].x : domain.start, &span),
eps);
span = points_.size() - 2;
Append(domain.end, Eval(clip ? points_.back().x : domain.end, &span),
append(domain.end, eval(clip ? points_.back().x : domain.end, &span),
eps);
}
@ -237,7 +237,7 @@ Pwl &Pwl::operator*=(double d)
return *this;
}
void Pwl::Debug(FILE *fp) const
void Pwl::debug(FILE *fp) const
{
fprintf(fp, "Pwl {\n");
for (auto &p : points_)

View file

@ -17,24 +17,26 @@ class Pwl
{
public:
struct Interval {
Interval(double _start, double _end) : start(_start), end(_end)
Interval(double _start, double _end)
: start(_start), end(_end)
{
}
double start, end;
bool Contains(double value)
bool contains(double value)
{
return value >= start && value <= end;
}
double Clip(double value)
double clip(double value)
{
return value < start ? start
: (value > end ? end : value);
}
double Len() const { return end - start; }
double len() const { return end - start; }
};
struct Point {
Point() : x(0), y(0) {}
Point(double _x, double _y) : x(_x), y(_y) {}
Point(double _x, double _y)
: x(_x), y(_y) {}
double x, y;
Point operator-(Point const &p) const
{
@ -50,23 +52,23 @@ public:
}
Point operator*(double f) const { return Point(x * f, y * f); }
Point operator/(double f) const { return Point(x / f, y / f); }
double Len2() const { return x * x + y * y; }
double Len() const { return sqrt(Len2()); }
double len2() const { return x * x + y * y; }
double len() const { return sqrt(len2()); }
};
Pwl() {}
Pwl(std::vector<Point> const &points) : points_(points) {}
void Read(boost::property_tree::ptree const &params);
void Append(double x, double y, const double eps = 1e-6);
void Prepend(double x, double y, const double eps = 1e-6);
Interval Domain() const;
Interval Range() const;
bool Empty() const;
void read(boost::property_tree::ptree const &params);
void append(double x, double y, const double eps = 1e-6);
void prepend(double x, double y, const double eps = 1e-6);
Interval domain() const;
Interval range() const;
bool empty() const;
// Evaluate Pwl, optionally supplying an initial guess for the
// "span". The "span" may be optionally be updated. If you want to know
// the "span" value but don't have an initial guess you can set it to
// -1.
double Eval(double x, int *span_ptr = nullptr,
bool update_span = true) const;
double eval(double x, int *spanPtr = nullptr,
bool updateSpan = true) const;
// Find perpendicular closest to xy, starting from span+1 so you can
// call it repeatedly to check for multiple closest points (set span to
// -1 on the first call). Also returns "pseudo" perpendiculars; see
@ -78,31 +80,31 @@ public:
Vertex, // vertex of Pwl is closest point
Perpendicular // true perpendicular found
};
PerpType Invert(Point const &xy, Point &perp, int &span,
PerpType invert(Point const &xy, Point &perp, int &span,
const double eps = 1e-6) const;
// Compute the inverse function. Indicate if it is a proper (true)
// inverse, or only a best effort (e.g. input was non-monotonic).
Pwl Inverse(bool *true_inverse = nullptr, const double eps = 1e-6) const;
Pwl inverse(bool *trueInverse = nullptr, const double eps = 1e-6) const;
// Compose two Pwls together, doing "this" first and "other" after.
Pwl Compose(Pwl const &other, const double eps = 1e-6) const;
Pwl compose(Pwl const &other, const double eps = 1e-6) const;
// Apply function to (x,y) values at every control point.
void Map(std::function<void(double x, double y)> f) const;
void map(std::function<void(double x, double y)> f) const;
// Apply function to (x, y0, y1) values wherever either Pwl has a
// control point.
static void Map2(Pwl const &pwl0, Pwl const &pwl1,
static void map2(Pwl const &pwl0, Pwl const &pwl1,
std::function<void(double x, double y0, double y1)> f);
// Combine two Pwls, meaning we create a new Pwl where the y values are
// given by running f wherever either has a knot.
static Pwl
Combine(Pwl const &pwl0, Pwl const &pwl1,
combine(Pwl const &pwl0, Pwl const &pwl1,
std::function<double(double x, double y0, double y1)> f,
const double eps = 1e-6);
// Make "this" match (at least) the given domain. Any extension my be
// clipped or linear.
void MatchDomain(Interval const &domain, bool clip = true,
void matchDomain(Interval const &domain, bool clip = true,
const double eps = 1e-6);
Pwl &operator*=(double d);
void Debug(FILE *fp = stdout) const;
void debug(FILE *fp = stdout) const;
private:
int findSpan(double x, int span) const;

File diff suppressed because it is too large Load diff

View file

@ -26,114 +26,114 @@ namespace RPiController {
struct AgcMeteringMode {
double weights[AGC_STATS_SIZE];
void Read(boost::property_tree::ptree const &params);
void read(boost::property_tree::ptree const &params);
};
struct AgcExposureMode {
std::vector<libcamera::utils::Duration> shutter;
std::vector<double> gain;
void Read(boost::property_tree::ptree const &params);
void read(boost::property_tree::ptree const &params);
};
struct AgcConstraint {
enum class Bound { LOWER = 0, UPPER = 1 };
Bound bound;
double q_lo;
double q_hi;
Pwl Y_target;
void Read(boost::property_tree::ptree const &params);
double qLo;
double qHi;
Pwl yTarget;
void read(boost::property_tree::ptree const &params);
};
typedef std::vector<AgcConstraint> AgcConstraintMode;
struct AgcConfig {
void Read(boost::property_tree::ptree const &params);
std::map<std::string, AgcMeteringMode> metering_modes;
std::map<std::string, AgcExposureMode> exposure_modes;
std::map<std::string, AgcConstraintMode> constraint_modes;
Pwl Y_target;
void read(boost::property_tree::ptree const &params);
std::map<std::string, AgcMeteringMode> meteringModes;
std::map<std::string, AgcExposureMode> exposureModes;
std::map<std::string, AgcConstraintMode> constraintModes;
Pwl yTarget;
double speed;
uint16_t startup_frames;
unsigned int convergence_frames;
double max_change;
double min_change;
double fast_reduce_threshold;
double speed_up_threshold;
std::string default_metering_mode;
std::string default_exposure_mode;
std::string default_constraint_mode;
double base_ev;
libcamera::utils::Duration default_exposure_time;
double default_analogue_gain;
uint16_t startupFrames;
unsigned int convergenceFrames;
double maxChange;
double minChange;
double fastReduceThreshold;
double speedUpThreshold;
std::string defaultMeteringMode;
std::string defaultExposureMode;
std::string defaultConstraintMode;
double baseEv;
libcamera::utils::Duration defaultExposureTime;
double defaultAnalogueGain;
};
class Agc : public AgcAlgorithm
{
public:
Agc(Controller *controller);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
char const *name() const override;
void read(boost::property_tree::ptree const &params) override;
// AGC handles "pausing" for itself.
bool IsPaused() const override;
void Pause() override;
void Resume() override;
unsigned int GetConvergenceFrames() const override;
void SetEv(double ev) override;
void SetFlickerPeriod(libcamera::utils::Duration flicker_period) override;
void SetMaxShutter(libcamera::utils::Duration max_shutter) override;
void SetFixedShutter(libcamera::utils::Duration fixed_shutter) override;
void SetFixedAnalogueGain(double fixed_analogue_gain) override;
void SetMeteringMode(std::string const &metering_mode_name) override;
void SetExposureMode(std::string const &exposure_mode_name) override;
void SetConstraintMode(std::string const &contraint_mode_name) override;
void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
void Prepare(Metadata *image_metadata) override;
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
bool isPaused() const override;
void pause() override;
void resume() override;
unsigned int getConvergenceFrames() const override;
void setEv(double ev) override;
void setFlickerPeriod(libcamera::utils::Duration flickerPeriod) override;
void setMaxShutter(libcamera::utils::Duration maxShutter) override;
void setFixedShutter(libcamera::utils::Duration fixedShutter) override;
void setFixedAnalogueGain(double fixedAnalogueGain) override;
void setMeteringMode(std::string const &meteringModeName) override;
void setExposureMode(std::string const &exposureModeName) override;
void setConstraintMode(std::string const &contraintModeName) override;
void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
void prepare(Metadata *imageMetadata) override;
void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
private:
void updateLockStatus(DeviceStatus const &device_status);
void updateLockStatus(DeviceStatus const &deviceStatus);
AgcConfig config_;
void housekeepConfig();
void fetchCurrentExposure(Metadata *image_metadata);
void fetchAwbStatus(Metadata *image_metadata);
void computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
double &gain, double &target_Y);
void fetchCurrentExposure(Metadata *imageMetadata);
void fetchAwbStatus(Metadata *imageMetadata);
void computeGain(bcm2835_isp_stats *statistics, Metadata *imageMetadata,
double &gain, double &targetY);
void computeTargetExposure(double gain);
bool applyDigitalGain(double gain, double target_Y);
bool applyDigitalGain(double gain, double targetY);
void filterExposure(bool desaturate);
void divideUpExposure();
void writeAndFinish(Metadata *image_metadata, bool desaturate);
void writeAndFinish(Metadata *imageMetadata, bool desaturate);
libcamera::utils::Duration clipShutter(libcamera::utils::Duration shutter);
AgcMeteringMode *metering_mode_;
AgcExposureMode *exposure_mode_;
AgcConstraintMode *constraint_mode_;
uint64_t frame_count_;
AgcMeteringMode *meteringMode_;
AgcExposureMode *exposureMode_;
AgcConstraintMode *constraintMode_;
uint64_t frameCount_;
AwbStatus awb_;
struct ExposureValues {
ExposureValues();
libcamera::utils::Duration shutter;
double analogue_gain;
libcamera::utils::Duration total_exposure;
libcamera::utils::Duration total_exposure_no_dg; // without digital gain
double analogueGain;
libcamera::utils::Duration totalExposure;
libcamera::utils::Duration totalExposureNoDG; // without digital gain
};
ExposureValues current_; // values for the current frame
ExposureValues target_; // calculate the values we want here
ExposureValues filtered_; // these values are filtered towards target
AgcStatus status_;
int lock_count_;
DeviceStatus last_device_status_;
libcamera::utils::Duration last_target_exposure_;
double last_sensitivity_; // sensitivity of the previous camera mode
int lockCount_;
DeviceStatus lastDeviceStatus_;
libcamera::utils::Duration lastTargetExposure_;
double lastSensitivity_; // sensitivity of the previous camera mode
// Below here the "settings" that applications can change.
std::string metering_mode_name_;
std::string exposure_mode_name_;
std::string constraint_mode_name_;
std::string meteringModeName_;
std::string exposureModeName_;
std::string constraintModeName_;
double ev_;
libcamera::utils::Duration flicker_period_;
libcamera::utils::Duration max_shutter_;
libcamera::utils::Duration fixed_shutter_;
double fixed_analogue_gain_;
libcamera::utils::Duration flickerPeriod_;
libcamera::utils::Duration maxShutter_;
libcamera::utils::Duration fixedShutter_;
double fixedAnalogueGain_;
};
} // namespace RPiController

File diff suppressed because it is too large Load diff

View file

@ -24,24 +24,24 @@ struct AlscCalibration {
struct AlscConfig {
// Only repeat the ALSC calculation every "this many" frames
uint16_t frame_period;
uint16_t framePeriod;
// number of initial frames for which speed taken as 1.0 (maximum)
uint16_t startup_frames;
uint16_t startupFrames;
// IIR filter speed applied to algorithm results
double speed;
double sigma_Cr;
double sigma_Cb;
double min_count;
uint16_t min_G;
double sigmaCr;
double sigmaCb;
double minCount;
uint16_t minG;
double omega;
uint32_t n_iter;
double luminance_lut[ALSC_CELLS_X * ALSC_CELLS_Y];
double luminance_strength;
std::vector<AlscCalibration> calibrations_Cr;
std::vector<AlscCalibration> calibrations_Cb;
double default_ct; // colour temperature if no metadata found
uint32_t nIter;
double luminanceLut[ALSC_CELLS_X * ALSC_CELLS_Y];
double luminanceStrength;
std::vector<AlscCalibration> calibrationsCr;
std::vector<AlscCalibration> calibrationsCb;
double defaultCt; // colour temperature if no metadata found
double threshold; // iteration termination threshold
double lambda_bound; // upper/lower bound for lambda from a value of 1
double lambdaBound; // upper/lower bound for lambda from a value of 1
};
class Alsc : public Algorithm
@ -49,58 +49,58 @@ class Alsc : public Algorithm
public:
Alsc(Controller *controller = NULL);
~Alsc();
char const *Name() const override;
void Initialise() override;
void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
void Read(boost::property_tree::ptree const &params) override;
void Prepare(Metadata *image_metadata) override;
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
char const *name() const override;
void initialise() override;
void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
void read(boost::property_tree::ptree const &params) override;
void prepare(Metadata *imageMetadata) override;
void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
private:
// configuration is read-only, and available to both threads
AlscConfig config_;
bool first_time_;
CameraMode camera_mode_;
double luminance_table_[ALSC_CELLS_X * ALSC_CELLS_Y];
std::thread async_thread_;
bool firstTime_;
CameraMode cameraMode_;
double luminanceTable_[ALSC_CELLS_X * ALSC_CELLS_Y];
std::thread asyncThread_;
void asyncFunc(); // asynchronous thread function
std::mutex mutex_;
// condvar for async thread to wait on
std::condition_variable async_signal_;
std::condition_variable asyncSignal_;
// condvar for synchronous thread to wait on
std::condition_variable sync_signal_;
std::condition_variable syncSignal_;
// for sync thread to check if async thread finished (requires mutex)
bool async_finished_;
bool asyncFinished_;
// for async thread to check if it's been told to run (requires mutex)
bool async_start_;
bool asyncStart_;
// for async thread to check if it's been told to quit (requires mutex)
bool async_abort_;
bool asyncAbort_;
// The following are only for the synchronous thread to use:
// for sync thread to note its has asked async thread to run
bool async_started_;
// counts up to frame_period before restarting the async thread
int frame_phase_;
// counts up to startup_frames
int frame_count_;
// counts up to startup_frames for Process function
int frame_count2_;
double sync_results_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
double prev_sync_results_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
bool asyncStarted_;
// counts up to framePeriod before restarting the async thread
int framePhase_;
// counts up to startupFrames
int frameCount_;
// counts up to startupFrames for Process function
int frameCount2_;
double syncResults_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
double prevSyncResults_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
void waitForAysncThread();
// The following are for the asynchronous thread to use, though the main
// thread can set/reset them if the async thread is known to be idle:
void restartAsync(StatisticsPtr &stats, Metadata *image_metadata);
void restartAsync(StatisticsPtr &stats, Metadata *imageMetadata);
// copy out the results from the async thread so that it can be restarted
void fetchAsyncResults();
double ct_;
bcm2835_isp_stats_region statistics_[ALSC_CELLS_Y * ALSC_CELLS_X];
double async_results_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
double async_lambda_r_[ALSC_CELLS_X * ALSC_CELLS_Y];
double async_lambda_b_[ALSC_CELLS_X * ALSC_CELLS_Y];
double asyncResults_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
double asyncLambdaR_[ALSC_CELLS_X * ALSC_CELLS_Y];
double asyncLambdaB_[ALSC_CELLS_X * ALSC_CELLS_Y];
void doAlsc();
double lambda_r_[ALSC_CELLS_X * ALSC_CELLS_Y];
double lambda_b_[ALSC_CELLS_X * ALSC_CELLS_Y];
double lambdaR_[ALSC_CELLS_X * ALSC_CELLS_Y];
double lambdaB_[ALSC_CELLS_X * ALSC_CELLS_Y];
};
} // namespace RPiController

View file

@ -24,33 +24,33 @@ LOG_DEFINE_CATEGORY(RPiAwb)
// todo - the locking in this algorithm needs some tidying up as has been done
// elsewhere (ALSC and AGC).
void AwbMode::Read(boost::property_tree::ptree const &params)
void AwbMode::read(boost::property_tree::ptree const &params)
{
ct_lo = params.get<double>("lo");
ct_hi = params.get<double>("hi");
ctLo = params.get<double>("lo");
ctHi = params.get<double>("hi");
}
void AwbPrior::Read(boost::property_tree::ptree const &params)
void AwbPrior::read(boost::property_tree::ptree const &params)
{
lux = params.get<double>("lux");
prior.Read(params.get_child("prior"));
prior.read(params.get_child("prior"));
}
static void read_ct_curve(Pwl &ct_r, Pwl &ct_b,
boost::property_tree::ptree const &params)
static void readCtCurve(Pwl &ctR, Pwl &ctB,
boost::property_tree::ptree const &params)
{
int num = 0;
for (auto it = params.begin(); it != params.end(); it++) {
double ct = it->second.get_value<double>();
assert(it == params.begin() || ct != ct_r.Domain().end);
assert(it == params.begin() || ct != ctR.domain().end);
if (++it == params.end())
throw std::runtime_error(
"AwbConfig: incomplete CT curve entry");
ct_r.Append(ct, it->second.get_value<double>());
ctR.append(ct, it->second.get_value<double>());
if (++it == params.end())
throw std::runtime_error(
"AwbConfig: incomplete CT curve entry");
ct_b.Append(ct, it->second.get_value<double>());
ctB.append(ct, it->second.get_value<double>());
num++;
}
if (num < 2)
@ -58,22 +58,21 @@ static void read_ct_curve(Pwl &ct_r, Pwl &ct_b,
"AwbConfig: insufficient points in CT curve");
}
void AwbConfig::Read(boost::property_tree::ptree const &params)
void AwbConfig::read(boost::property_tree::ptree const &params)
{
bayes = params.get<int>("bayes", 1);
frame_period = params.get<uint16_t>("frame_period", 10);
startup_frames = params.get<uint16_t>("startup_frames", 10);
convergence_frames = params.get<unsigned int>("convergence_frames", 3);
framePeriod = params.get<uint16_t>("framePeriod", 10);
startupFrames = params.get<uint16_t>("startupFrames", 10);
convergenceFrames = params.get<unsigned int>("convergence_frames", 3);
speed = params.get<double>("speed", 0.05);
if (params.get_child_optional("ct_curve"))
read_ct_curve(ct_r, ct_b, params.get_child("ct_curve"));
readCtCurve(ctR, ctB, params.get_child("ct_curve"));
if (params.get_child_optional("priors")) {
for (auto &p : params.get_child("priors")) {
AwbPrior prior;
prior.Read(p.second);
prior.read(p.second);
if (!priors.empty() && prior.lux <= priors.back().lux)
throw std::runtime_error(
"AwbConfig: Prior must be ordered in increasing lux value");
throw std::runtime_error("AwbConfig: Prior must be ordered in increasing lux value");
priors.push_back(prior);
}
if (priors.empty())
@ -82,177 +81,170 @@ void AwbConfig::Read(boost::property_tree::ptree const &params)
}
if (params.get_child_optional("modes")) {
for (auto &p : params.get_child("modes")) {
modes[p.first].Read(p.second);
if (default_mode == nullptr)
default_mode = &modes[p.first];
modes[p.first].read(p.second);
if (defaultMode == nullptr)
defaultMode = &modes[p.first];
}
if (default_mode == nullptr)
throw std::runtime_error(
"AwbConfig: no AWB modes configured");
if (defaultMode == nullptr)
throw std::runtime_error("AwbConfig: no AWB modes configured");
}
min_pixels = params.get<double>("min_pixels", 16.0);
min_G = params.get<uint16_t>("min_G", 32);
min_regions = params.get<uint32_t>("min_regions", 10);
delta_limit = params.get<double>("delta_limit", 0.2);
coarse_step = params.get<double>("coarse_step", 0.2);
transverse_pos = params.get<double>("transverse_pos", 0.01);
transverse_neg = params.get<double>("transverse_neg", 0.01);
if (transverse_pos <= 0 || transverse_neg <= 0)
throw std::runtime_error(
"AwbConfig: transverse_pos/neg must be > 0");
sensitivity_r = params.get<double>("sensitivity_r", 1.0);
sensitivity_b = params.get<double>("sensitivity_b", 1.0);
minPixels = params.get<double>("min_pixels", 16.0);
minG = params.get<uint16_t>("min_G", 32);
minRegions = params.get<uint32_t>("min_regions", 10);
deltaLimit = params.get<double>("delta_limit", 0.2);
coarseStep = params.get<double>("coarse_step", 0.2);
transversePos = params.get<double>("transverse_pos", 0.01);
transverseNeg = params.get<double>("transverse_neg", 0.01);
if (transversePos <= 0 || transverseNeg <= 0)
throw std::runtime_error("AwbConfig: transverse_pos/neg must be > 0");
sensitivityR = params.get<double>("sensitivity_r", 1.0);
sensitivityB = params.get<double>("sensitivity_b", 1.0);
if (bayes) {
if (ct_r.Empty() || ct_b.Empty() || priors.empty() ||
default_mode == nullptr) {
if (ctR.empty() || ctB.empty() || priors.empty() ||
defaultMode == nullptr) {
LOG(RPiAwb, Warning)
<< "Bayesian AWB mis-configured - switch to Grey method";
bayes = false;
}
}
fast = params.get<int>(
"fast", bayes); // default to fast for Bayesian, otherwise slow
whitepoint_r = params.get<double>("whitepoint_r", 0.0);
whitepoint_b = params.get<double>("whitepoint_b", 0.0);
fast = params.get<int>("fast", bayes); // default to fast for Bayesian, otherwise slow
whitepointR = params.get<double>("whitepoint_r", 0.0);
whitepointB = params.get<double>("whitepoint_b", 0.0);
if (bayes == false)
sensitivity_r = sensitivity_b =
1.0; // nor do sensitivities make any sense
sensitivityR = sensitivityB = 1.0; // nor do sensitivities make any sense
}
Awb::Awb(Controller *controller)
: AwbAlgorithm(controller)
{
async_abort_ = async_start_ = async_started_ = async_finished_ = false;
asyncAbort_ = asyncStart_ = asyncStarted_ = asyncFinished_ = false;
mode_ = nullptr;
manual_r_ = manual_b_ = 0.0;
first_switch_mode_ = true;
async_thread_ = std::thread(std::bind(&Awb::asyncFunc, this));
manualR_ = manualB_ = 0.0;
firstSwitchMode_ = true;
asyncThread_ = std::thread(std::bind(&Awb::asyncFunc, this));
}
Awb::~Awb()
{
{
std::lock_guard<std::mutex> lock(mutex_);
async_abort_ = true;
asyncAbort_ = true;
}
async_signal_.notify_one();
async_thread_.join();
asyncSignal_.notify_one();
asyncThread_.join();
}
char const *Awb::Name() const
char const *Awb::name() const
{
return NAME;
}
void Awb::Read(boost::property_tree::ptree const &params)
void Awb::read(boost::property_tree::ptree const &params)
{
config_.Read(params);
config_.read(params);
}
void Awb::Initialise()
void Awb::initialise()
{
frame_count_ = frame_phase_ = 0;
frameCount_ = framePhase_ = 0;
// Put something sane into the status that we are filtering towards,
// just in case the first few frames don't have anything meaningful in
// them.
if (!config_.ct_r.Empty() && !config_.ct_b.Empty()) {
sync_results_.temperature_K = config_.ct_r.Domain().Clip(4000);
sync_results_.gain_r =
1.0 / config_.ct_r.Eval(sync_results_.temperature_K);
sync_results_.gain_g = 1.0;
sync_results_.gain_b =
1.0 / config_.ct_b.Eval(sync_results_.temperature_K);
if (!config_.ctR.empty() && !config_.ctB.empty()) {
syncResults_.temperatureK = config_.ctR.domain().clip(4000);
syncResults_.gainR = 1.0 / config_.ctR.eval(syncResults_.temperatureK);
syncResults_.gainG = 1.0;
syncResults_.gainB = 1.0 / config_.ctB.eval(syncResults_.temperatureK);
} else {
// random values just to stop the world blowing up
sync_results_.temperature_K = 4500;
sync_results_.gain_r = sync_results_.gain_g =
sync_results_.gain_b = 1.0;
syncResults_.temperatureK = 4500;
syncResults_.gainR = syncResults_.gainG = syncResults_.gainB = 1.0;
}
prev_sync_results_ = sync_results_;
async_results_ = sync_results_;
prevSyncResults_ = syncResults_;
asyncResults_ = syncResults_;
}
bool Awb::IsPaused() const
bool Awb::isPaused() const
{
return false;
}
void Awb::Pause()
void Awb::pause()
{
// "Pause" by fixing everything to the most recent values.
manual_r_ = sync_results_.gain_r = prev_sync_results_.gain_r;
manual_b_ = sync_results_.gain_b = prev_sync_results_.gain_b;
sync_results_.gain_g = prev_sync_results_.gain_g;
sync_results_.temperature_K = prev_sync_results_.temperature_K;
manualR_ = syncResults_.gainR = prevSyncResults_.gainR;
manualB_ = syncResults_.gainB = prevSyncResults_.gainB;
syncResults_.gainG = prevSyncResults_.gainG;
syncResults_.temperatureK = prevSyncResults_.temperatureK;
}
void Awb::Resume()
void Awb::resume()
{
manual_r_ = 0.0;
manual_b_ = 0.0;
manualR_ = 0.0;
manualB_ = 0.0;
}
unsigned int Awb::GetConvergenceFrames() const
unsigned int Awb::getConvergenceFrames() const
{
// If not in auto mode, there is no convergence
// to happen, so no need to drop any frames - return zero.
if (!isAutoEnabled())
return 0;
else
return config_.convergence_frames;
return config_.convergenceFrames;
}
void Awb::SetMode(std::string const &mode_name)
void Awb::setMode(std::string const &modeName)
{
mode_name_ = mode_name;
modeName_ = modeName;
}
void Awb::SetManualGains(double manual_r, double manual_b)
void Awb::setManualGains(double manualR, double manualB)
{
// If any of these are 0.0, we swich back to auto.
manual_r_ = manual_r;
manual_b_ = manual_b;
// If not in auto mode, set these values into the sync_results which
manualR_ = manualR;
manualB_ = manualB;
// If not in auto mode, set these values into the syncResults which
// means that Prepare() will adopt them immediately.
if (!isAutoEnabled()) {
sync_results_.gain_r = prev_sync_results_.gain_r = manual_r_;
sync_results_.gain_g = prev_sync_results_.gain_g = 1.0;
sync_results_.gain_b = prev_sync_results_.gain_b = manual_b_;
syncResults_.gainR = prevSyncResults_.gainR = manualR_;
syncResults_.gainG = prevSyncResults_.gainG = 1.0;
syncResults_.gainB = prevSyncResults_.gainB = manualB_;
}
}
void Awb::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
void Awb::switchMode([[maybe_unused]] CameraMode const &cameraMode,
Metadata *metadata)
{
// On the first mode switch we'll have no meaningful colour
// temperature, so try to dead reckon one if in manual mode.
if (!isAutoEnabled() && first_switch_mode_ && config_.bayes) {
Pwl ct_r_inverse = config_.ct_r.Inverse();
Pwl ct_b_inverse = config_.ct_b.Inverse();
double ct_r = ct_r_inverse.Eval(ct_r_inverse.Domain().Clip(1 / manual_r_));
double ct_b = ct_b_inverse.Eval(ct_b_inverse.Domain().Clip(1 / manual_b_));
prev_sync_results_.temperature_K = (ct_r + ct_b) / 2;
sync_results_.temperature_K = prev_sync_results_.temperature_K;
if (!isAutoEnabled() && firstSwitchMode_ && config_.bayes) {
Pwl ctRInverse = config_.ctR.inverse();
Pwl ctBInverse = config_.ctB.inverse();
double ctR = ctRInverse.eval(ctRInverse.domain().clip(1 / manualR_));
double ctB = ctBInverse.eval(ctBInverse.domain().clip(1 / manualB_));
prevSyncResults_.temperatureK = (ctR + ctB) / 2;
syncResults_.temperatureK = prevSyncResults_.temperatureK;
}
// Let other algorithms know the current white balance values.
metadata->Set("awb.status", prev_sync_results_);
first_switch_mode_ = false;
metadata->set("awb.status", prevSyncResults_);
firstSwitchMode_ = false;
}
bool Awb::isAutoEnabled() const
{
return manual_r_ == 0.0 || manual_b_ == 0.0;
return manualR_ == 0.0 || manualB_ == 0.0;
}
void Awb::fetchAsyncResults()
{
LOG(RPiAwb, Debug) << "Fetch AWB results";
async_finished_ = false;
async_started_ = false;
asyncFinished_ = false;
asyncStarted_ = false;
// It's possible manual gains could be set even while the async
// thread was running, so only copy the results if still in auto mode.
if (isAutoEnabled())
sync_results_ = async_results_;
syncResults_ = asyncResults_;
}
void Awb::restartAsync(StatisticsPtr &stats, double lux)
@ -261,75 +253,74 @@ void Awb::restartAsync(StatisticsPtr &stats, double lux)
// this makes a new reference which belongs to the asynchronous thread
statistics_ = stats;
// store the mode as it could technically change
auto m = config_.modes.find(mode_name_);
auto m = config_.modes.find(modeName_);
mode_ = m != config_.modes.end()
? &m->second
: (mode_ == nullptr ? config_.default_mode : mode_);
: (mode_ == nullptr ? config_.defaultMode : mode_);
lux_ = lux;
frame_phase_ = 0;
async_started_ = true;
size_t len = mode_name_.copy(async_results_.mode,
sizeof(async_results_.mode) - 1);
async_results_.mode[len] = '\0';
framePhase_ = 0;
asyncStarted_ = true;
size_t len = modeName_.copy(asyncResults_.mode,
sizeof(asyncResults_.mode) - 1);
asyncResults_.mode[len] = '\0';
{
std::lock_guard<std::mutex> lock(mutex_);
async_start_ = true;
asyncStart_ = true;
}
async_signal_.notify_one();
asyncSignal_.notify_one();
}
void Awb::Prepare(Metadata *image_metadata)
void Awb::prepare(Metadata *imageMetadata)
{
if (frame_count_ < (int)config_.startup_frames)
frame_count_++;
double speed = frame_count_ < (int)config_.startup_frames
if (frameCount_ < (int)config_.startupFrames)
frameCount_++;
double speed = frameCount_ < (int)config_.startupFrames
? 1.0
: config_.speed;
LOG(RPiAwb, Debug)
<< "frame_count " << frame_count_ << " speed " << speed;
<< "frame_count " << frameCount_ << " speed " << speed;
{
std::unique_lock<std::mutex> lock(mutex_);
if (async_started_ && async_finished_)
if (asyncStarted_ && asyncFinished_)
fetchAsyncResults();
}
// Finally apply IIR filter to results and put into metadata.
memcpy(prev_sync_results_.mode, sync_results_.mode,
sizeof(prev_sync_results_.mode));
prev_sync_results_.temperature_K =
speed * sync_results_.temperature_K +
(1.0 - speed) * prev_sync_results_.temperature_K;
prev_sync_results_.gain_r = speed * sync_results_.gain_r +
(1.0 - speed) * prev_sync_results_.gain_r;
prev_sync_results_.gain_g = speed * sync_results_.gain_g +
(1.0 - speed) * prev_sync_results_.gain_g;
prev_sync_results_.gain_b = speed * sync_results_.gain_b +
(1.0 - speed) * prev_sync_results_.gain_b;
image_metadata->Set("awb.status", prev_sync_results_);
memcpy(prevSyncResults_.mode, syncResults_.mode,
sizeof(prevSyncResults_.mode));
prevSyncResults_.temperatureK = speed * syncResults_.temperatureK +
(1.0 - speed) * prevSyncResults_.temperatureK;
prevSyncResults_.gainR = speed * syncResults_.gainR +
(1.0 - speed) * prevSyncResults_.gainR;
prevSyncResults_.gainG = speed * syncResults_.gainG +
(1.0 - speed) * prevSyncResults_.gainG;
prevSyncResults_.gainB = speed * syncResults_.gainB +
(1.0 - speed) * prevSyncResults_.gainB;
imageMetadata->set("awb.status", prevSyncResults_);
LOG(RPiAwb, Debug)
<< "Using AWB gains r " << prev_sync_results_.gain_r << " g "
<< prev_sync_results_.gain_g << " b "
<< prev_sync_results_.gain_b;
<< "Using AWB gains r " << prevSyncResults_.gainR << " g "
<< prevSyncResults_.gainG << " b "
<< prevSyncResults_.gainB;
}
void Awb::Process(StatisticsPtr &stats, Metadata *image_metadata)
void Awb::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
// Count frames since we last poked the async thread.
if (frame_phase_ < (int)config_.frame_period)
frame_phase_++;
LOG(RPiAwb, Debug) << "frame_phase " << frame_phase_;
if (framePhase_ < (int)config_.framePeriod)
framePhase_++;
LOG(RPiAwb, Debug) << "frame_phase " << framePhase_;
// We do not restart the async thread if we're not in auto mode.
if (isAutoEnabled() &&
(frame_phase_ >= (int)config_.frame_period ||
frame_count_ < (int)config_.startup_frames)) {
(framePhase_ >= (int)config_.framePeriod ||
frameCount_ < (int)config_.startupFrames)) {
// Update any settings and any image metadata that we need.
struct LuxStatus lux_status = {};
lux_status.lux = 400; // in case no metadata
if (image_metadata->Get("lux.status", lux_status) != 0)
struct LuxStatus luxStatus = {};
luxStatus.lux = 400; // in case no metadata
if (imageMetadata->get("lux.status", luxStatus) != 0)
LOG(RPiAwb, Debug) << "No lux metadata found";
LOG(RPiAwb, Debug) << "Awb lux value is " << lux_status.lux;
LOG(RPiAwb, Debug) << "Awb lux value is " << luxStatus.lux;
if (async_started_ == false)
restartAsync(stats, lux_status.lux);
if (asyncStarted_ == false)
restartAsync(stats, luxStatus.lux);
}
}
@ -338,32 +329,32 @@ void Awb::asyncFunc()
while (true) {
{
std::unique_lock<std::mutex> lock(mutex_);
async_signal_.wait(lock, [&] {
return async_start_ || async_abort_;
asyncSignal_.wait(lock, [&] {
return asyncStart_ || asyncAbort_;
});
async_start_ = false;
if (async_abort_)
asyncStart_ = false;
if (asyncAbort_)
break;
}
doAwb();
{
std::lock_guard<std::mutex> lock(mutex_);
async_finished_ = true;
asyncFinished_ = true;
}
sync_signal_.notify_one();
syncSignal_.notify_one();
}
}
static void generate_stats(std::vector<Awb::RGB> &zones,
bcm2835_isp_stats_region *stats, double min_pixels,
double min_G)
static void generateStats(std::vector<Awb::RGB> &zones,
bcm2835_isp_stats_region *stats, double minPixels,
double minG)
{
for (int i = 0; i < AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y; i++) {
Awb::RGB zone;
double counted = stats[i].counted;
if (counted >= min_pixels) {
if (counted >= minPixels) {
zone.G = stats[i].g_sum / counted;
if (zone.G >= min_G) {
if (zone.G >= minG) {
zone.R = stats[i].r_sum / counted;
zone.B = stats[i].b_sum / counted;
zones.push_back(zone);
@ -377,32 +368,33 @@ void Awb::prepareStats()
zones_.clear();
// LSC has already been applied to the stats in this pipeline, so stop
// any LSC compensation. We also ignore config_.fast in this version.
generate_stats(zones_, statistics_->awb_stats, config_.min_pixels,
config_.min_G);
generateStats(zones_, statistics_->awb_stats, config_.minPixels,
config_.minG);
// we're done with these; we may as well relinquish our hold on the
// pointer.
statistics_.reset();
// apply sensitivities, so values appear to come from our "canonical"
// sensor.
for (auto &zone : zones_)
zone.R *= config_.sensitivity_r,
zone.B *= config_.sensitivity_b;
for (auto &zone : zones_) {
zone.R *= config_.sensitivityR;
zone.B *= config_.sensitivityB;
}
}
double Awb::computeDelta2Sum(double gain_r, double gain_b)
double Awb::computeDelta2Sum(double gainR, double gainB)
{
// Compute the sum of the squared colour error (non-greyness) as it
// appears in the log likelihood equation.
double delta2_sum = 0;
double delta2Sum = 0;
for (auto &z : zones_) {
double delta_r = gain_r * z.R - 1 - config_.whitepoint_r;
double delta_b = gain_b * z.B - 1 - config_.whitepoint_b;
double delta2 = delta_r * delta_r + delta_b * delta_b;
//LOG(RPiAwb, Debug) << "delta_r " << delta_r << " delta_b " << delta_b << " delta2 " << delta2;
delta2 = std::min(delta2, config_.delta_limit);
delta2_sum += delta2;
double deltaR = gainR * z.R - 1 - config_.whitepointR;
double deltaB = gainB * z.B - 1 - config_.whitepointB;
double delta2 = deltaR * deltaR + deltaB * deltaB;
//LOG(RPiAwb, Debug) << "deltaR " << deltaR << " deltaB " << deltaB << " delta2 " << delta2;
delta2 = std::min(delta2, config_.deltaLimit);
delta2Sum += delta2;
}
return delta2_sum;
return delta2Sum;
}
Pwl Awb::interpolatePrior()
@ -420,7 +412,7 @@ Pwl Awb::interpolatePrior()
idx++;
double lux0 = config_.priors[idx].lux,
lux1 = config_.priors[idx + 1].lux;
return Pwl::Combine(config_.priors[idx].prior,
return Pwl::combine(config_.priors[idx].prior,
config_.priors[idx + 1].prior,
[&](double /*x*/, double y0, double y1) {
return y0 + (y1 - y0) *
@ -429,62 +421,60 @@ Pwl Awb::interpolatePrior()
}
}
static double interpolate_quadatric(Pwl::Point const &A, Pwl::Point const &B,
Pwl::Point const &C)
static double interpolateQuadatric(Pwl::Point const &a, Pwl::Point const &b,
Pwl::Point const &c)
{
// Given 3 points on a curve, find the extremum of the function in that
// interval by fitting a quadratic.
const double eps = 1e-3;
Pwl::Point CA = C - A, BA = B - A;
double denominator = 2 * (BA.y * CA.x - CA.y * BA.x);
Pwl::Point ca = c - a, ba = b - a;
double denominator = 2 * (ba.y * ca.x - ca.y * ba.x);
if (abs(denominator) > eps) {
double numerator = BA.y * CA.x * CA.x - CA.y * BA.x * BA.x;
double result = numerator / denominator + A.x;
return std::max(A.x, std::min(C.x, result));
double numerator = ba.y * ca.x * ca.x - ca.y * ba.x * ba.x;
double result = numerator / denominator + a.x;
return std::max(a.x, std::min(c.x, result));
}
// has degenerated to straight line segment
return A.y < C.y - eps ? A.x : (C.y < A.y - eps ? C.x : B.x);
return a.y < c.y - eps ? a.x : (c.y < a.y - eps ? c.x : b.x);
}
double Awb::coarseSearch(Pwl const &prior)
{
points_.clear(); // assume doesn't deallocate memory
size_t best_point = 0;
double t = mode_->ct_lo;
int span_r = 0, span_b = 0;
size_t bestPoint = 0;
double t = mode_->ctLo;
int spanR = 0, spanB = 0;
// Step down the CT curve evaluating log likelihood.
while (true) {
double r = config_.ct_r.Eval(t, &span_r);
double b = config_.ct_b.Eval(t, &span_b);
double gain_r = 1 / r, gain_b = 1 / b;
double delta2_sum = computeDelta2Sum(gain_r, gain_b);
double prior_log_likelihood =
prior.Eval(prior.Domain().Clip(t));
double final_log_likelihood = delta2_sum - prior_log_likelihood;
double r = config_.ctR.eval(t, &spanR);
double b = config_.ctB.eval(t, &spanB);
double gainR = 1 / r, gainB = 1 / b;
double delta2Sum = computeDelta2Sum(gainR, gainB);
double priorLogLikelihood = prior.eval(prior.domain().clip(t));
double finalLogLikelihood = delta2Sum - priorLogLikelihood;
LOG(RPiAwb, Debug)
<< "t: " << t << " gain_r " << gain_r << " gain_b "
<< gain_b << " delta2_sum " << delta2_sum
<< " prior " << prior_log_likelihood << " final "
<< final_log_likelihood;
points_.push_back(Pwl::Point(t, final_log_likelihood));
if (points_.back().y < points_[best_point].y)
best_point = points_.size() - 1;
if (t == mode_->ct_hi)
<< "t: " << t << " gain R " << gainR << " gain B "
<< gainB << " delta2_sum " << delta2Sum
<< " prior " << priorLogLikelihood << " final "
<< finalLogLikelihood;
points_.push_back(Pwl::Point(t, finalLogLikelihood));
if (points_.back().y < points_[bestPoint].y)
bestPoint = points_.size() - 1;
if (t == mode_->ctHi)
break;
// for even steps along the r/b curve scale them by the current t
t = std::min(t + t / 10 * config_.coarse_step,
mode_->ct_hi);
t = std::min(t + t / 10 * config_.coarseStep, mode_->ctHi);
}
t = points_[best_point].x;
t = points_[bestPoint].x;
LOG(RPiAwb, Debug) << "Coarse search found CT " << t;
// We have the best point of the search, but refine it with a quadratic
// interpolation around its neighbours.
if (points_.size() > 2) {
unsigned long bp = std::min(best_point, points_.size() - 2);
best_point = std::max(1UL, bp);
t = interpolate_quadatric(points_[best_point - 1],
points_[best_point],
points_[best_point + 1]);
unsigned long bp = std::min(bestPoint, points_.size() - 2);
bestPoint = std::max(1UL, bp);
t = interpolateQuadatric(points_[bestPoint - 1],
points_[bestPoint],
points_[bestPoint + 1]);
LOG(RPiAwb, Debug)
<< "After quadratic refinement, coarse search has CT "
<< t;
@ -494,80 +484,76 @@ double Awb::coarseSearch(Pwl const &prior)
void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
{
int span_r = -1, span_b = -1;
config_.ct_r.Eval(t, &span_r);
config_.ct_b.Eval(t, &span_b);
double step = t / 10 * config_.coarse_step * 0.1;
int spanR = -1, spanB = -1;
config_.ctR.eval(t, &spanR);
config_.ctB.eval(t, &spanB);
double step = t / 10 * config_.coarseStep * 0.1;
int nsteps = 5;
double r_diff = config_.ct_r.Eval(t + nsteps * step, &span_r) -
config_.ct_r.Eval(t - nsteps * step, &span_r);
double b_diff = config_.ct_b.Eval(t + nsteps * step, &span_b) -
config_.ct_b.Eval(t - nsteps * step, &span_b);
Pwl::Point transverse(b_diff, -r_diff);
if (transverse.Len2() < 1e-6)
double rDiff = config_.ctR.eval(t + nsteps * step, &spanR) -
config_.ctR.eval(t - nsteps * step, &spanR);
double bDiff = config_.ctB.eval(t + nsteps * step, &spanB) -
config_.ctB.eval(t - nsteps * step, &spanB);
Pwl::Point transverse(bDiff, -rDiff);
if (transverse.len2() < 1e-6)
return;
// unit vector orthogonal to the b vs. r function (pointing outwards
// with r and b increasing)
transverse = transverse / transverse.Len();
double best_log_likelihood = 0, best_t = 0, best_r = 0, best_b = 0;
double transverse_range =
config_.transverse_neg + config_.transverse_pos;
const int MAX_NUM_DELTAS = 12;
transverse = transverse / transverse.len();
double bestLogLikelihood = 0, bestT = 0, bestR = 0, bestB = 0;
double transverseRange = config_.transverseNeg + config_.transversePos;
const int maxNumDeltas = 12;
// a transverse step approximately every 0.01 r/b units
int num_deltas = floor(transverse_range * 100 + 0.5) + 1;
num_deltas = num_deltas < 3 ? 3 :
(num_deltas > MAX_NUM_DELTAS ? MAX_NUM_DELTAS : num_deltas);
int numDeltas = floor(transverseRange * 100 + 0.5) + 1;
numDeltas = numDeltas < 3 ? 3 : (numDeltas > maxNumDeltas ? maxNumDeltas : numDeltas);
// Step down CT curve. March a bit further if the transverse range is
// large.
nsteps += num_deltas;
nsteps += numDeltas;
for (int i = -nsteps; i <= nsteps; i++) {
double t_test = t + i * step;
double prior_log_likelihood =
prior.Eval(prior.Domain().Clip(t_test));
double r_curve = config_.ct_r.Eval(t_test, &span_r);
double b_curve = config_.ct_b.Eval(t_test, &span_b);
double tTest = t + i * step;
double priorLogLikelihood =
prior.eval(prior.domain().clip(tTest));
double rCurve = config_.ctR.eval(tTest, &spanR);
double bCurve = config_.ctB.eval(tTest, &spanB);
// x will be distance off the curve, y the log likelihood there
Pwl::Point points[MAX_NUM_DELTAS];
int best_point = 0;
Pwl::Point points[maxNumDeltas];
int bestPoint = 0;
// Take some measurements transversely *off* the CT curve.
for (int j = 0; j < num_deltas; j++) {
points[j].x = -config_.transverse_neg +
(transverse_range * j) / (num_deltas - 1);
Pwl::Point rb_test = Pwl::Point(r_curve, b_curve) +
transverse * points[j].x;
double r_test = rb_test.x, b_test = rb_test.y;
double gain_r = 1 / r_test, gain_b = 1 / b_test;
double delta2_sum = computeDelta2Sum(gain_r, gain_b);
points[j].y = delta2_sum - prior_log_likelihood;
for (int j = 0; j < numDeltas; j++) {
points[j].x = -config_.transverseNeg +
(transverseRange * j) / (numDeltas - 1);
Pwl::Point rbTest = Pwl::Point(rCurve, bCurve) +
transverse * points[j].x;
double rTest = rbTest.x, bTest = rbTest.y;
double gainR = 1 / rTest, gainB = 1 / bTest;
double delta2Sum = computeDelta2Sum(gainR, gainB);
points[j].y = delta2Sum - priorLogLikelihood;
LOG(RPiAwb, Debug)
<< "At t " << t_test << " r " << r_test << " b "
<< b_test << ": " << points[j].y;
if (points[j].y < points[best_point].y)
best_point = j;
<< "At t " << tTest << " r " << rTest << " b "
<< bTest << ": " << points[j].y;
if (points[j].y < points[bestPoint].y)
bestPoint = j;
}
// We have NUM_DELTAS points transversely across the CT curve,
// now let's do a quadratic interpolation for the best result.
best_point = std::max(1, std::min(best_point, num_deltas - 2));
Pwl::Point rb_test =
Pwl::Point(r_curve, b_curve) +
transverse *
interpolate_quadatric(points[best_point - 1],
points[best_point],
points[best_point + 1]);
double r_test = rb_test.x, b_test = rb_test.y;
double gain_r = 1 / r_test, gain_b = 1 / b_test;
double delta2_sum = computeDelta2Sum(gain_r, gain_b);
double final_log_likelihood = delta2_sum - prior_log_likelihood;
bestPoint = std::max(1, std::min(bestPoint, numDeltas - 2));
Pwl::Point rbTest = Pwl::Point(rCurve, bCurve) +
transverse * interpolateQuadatric(points[bestPoint - 1],
points[bestPoint],
points[bestPoint + 1]);
double rTest = rbTest.x, bTest = rbTest.y;
double gainR = 1 / rTest, gainB = 1 / bTest;
double delta2Sum = computeDelta2Sum(gainR, gainB);
double finalLogLikelihood = delta2Sum - priorLogLikelihood;
LOG(RPiAwb, Debug)
<< "Finally "
<< t_test << " r " << r_test << " b " << b_test << ": "
<< final_log_likelihood
<< (final_log_likelihood < best_log_likelihood ? " BEST" : "");
if (best_t == 0 || final_log_likelihood < best_log_likelihood)
best_log_likelihood = final_log_likelihood,
best_t = t_test, best_r = r_test, best_b = b_test;
<< tTest << " r " << rTest << " b " << bTest << ": "
<< finalLogLikelihood
<< (finalLogLikelihood < bestLogLikelihood ? " BEST" : "");
if (bestT == 0 || finalLogLikelihood < bestLogLikelihood)
bestLogLikelihood = finalLogLikelihood,
bestT = tTest, bestR = rTest, bestB = bTest;
}
t = best_t, r = best_r, b = best_b;
t = bestT, r = bestR, b = bestB;
LOG(RPiAwb, Debug)
<< "Fine search found t " << t << " r " << r << " b " << b;
}
@ -582,12 +568,12 @@ void Awb::awbBayes()
// valid... not entirely sure about this.
Pwl prior = interpolatePrior();
prior *= zones_.size() / (double)(AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y);
prior.Map([](double x, double y) {
prior.map([](double x, double y) {
LOG(RPiAwb, Debug) << "(" << x << "," << y << ")";
});
double t = coarseSearch(prior);
double r = config_.ct_r.Eval(t);
double b = config_.ct_b.Eval(t);
double r = config_.ctR.eval(t);
double b = config_.ctB.eval(t);
LOG(RPiAwb, Debug)
<< "After coarse search: r " << r << " b " << b << " (gains r "
<< 1 / r << " b " << 1 / b << ")";
@ -604,10 +590,10 @@ void Awb::awbBayes()
// Write results out for the main thread to pick up. Remember to adjust
// the gains from the ones that the "canonical sensor" would require to
// the ones needed by *this* sensor.
async_results_.temperature_K = t;
async_results_.gain_r = 1.0 / r * config_.sensitivity_r;
async_results_.gain_g = 1.0;
async_results_.gain_b = 1.0 / b * config_.sensitivity_b;
asyncResults_.temperatureK = t;
asyncResults_.gainR = 1.0 / r * config_.sensitivityR;
asyncResults_.gainG = 1.0;
asyncResults_.gainB = 1.0 / b * config_.sensitivityB;
}
void Awb::awbGrey()
@ -617,51 +603,51 @@ void Awb::awbGrey()
// that we can sort them to exclude the extreme gains. We could
// consider some variations, such as normalising all the zones first, or
// doing an L2 average etc.
std::vector<RGB> &derivs_R(zones_);
std::vector<RGB> derivs_B(derivs_R);
std::sort(derivs_R.begin(), derivs_R.end(),
std::vector<RGB> &derivsR(zones_);
std::vector<RGB> derivsB(derivsR);
std::sort(derivsR.begin(), derivsR.end(),
[](RGB const &a, RGB const &b) {
return a.G * b.R < b.G * a.R;
});
std::sort(derivs_B.begin(), derivs_B.end(),
std::sort(derivsB.begin(), derivsB.end(),
[](RGB const &a, RGB const &b) {
return a.G * b.B < b.G * a.B;
});
// Average the middle half of the values.
int discard = derivs_R.size() / 4;
RGB sum_R(0, 0, 0), sum_B(0, 0, 0);
for (auto ri = derivs_R.begin() + discard,
bi = derivs_B.begin() + discard;
ri != derivs_R.end() - discard; ri++, bi++)
sum_R += *ri, sum_B += *bi;
double gain_r = sum_R.G / (sum_R.R + 1),
gain_b = sum_B.G / (sum_B.B + 1);
async_results_.temperature_K = 4500; // don't know what it is
async_results_.gain_r = gain_r;
async_results_.gain_g = 1.0;
async_results_.gain_b = gain_b;
int discard = derivsR.size() / 4;
RGB sumR(0, 0, 0), sumB(0, 0, 0);
for (auto ri = derivsR.begin() + discard,
bi = derivsB.begin() + discard;
ri != derivsR.end() - discard; ri++, bi++)
sumR += *ri, sumB += *bi;
double gainR = sumR.G / (sumR.R + 1),
gainB = sumB.G / (sumB.B + 1);
asyncResults_.temperatureK = 4500; // don't know what it is
asyncResults_.gainR = gainR;
asyncResults_.gainG = 1.0;
asyncResults_.gainB = gainB;
}
void Awb::doAwb()
{
prepareStats();
LOG(RPiAwb, Debug) << "Valid zones: " << zones_.size();
if (zones_.size() > config_.min_regions) {
if (zones_.size() > config_.minRegions) {
if (config_.bayes)
awbBayes();
else
awbGrey();
LOG(RPiAwb, Debug)
<< "CT found is "
<< async_results_.temperature_K
<< " with gains r " << async_results_.gain_r
<< " and b " << async_results_.gain_b;
<< asyncResults_.temperatureK
<< " with gains r " << asyncResults_.gainR
<< " and b " << asyncResults_.gainB;
}
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Awb(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -19,59 +19,59 @@ namespace RPiController {
// Control algorithm to perform AWB calculations.
struct AwbMode {
void Read(boost::property_tree::ptree const &params);
double ct_lo; // low CT value for search
double ct_hi; // high CT value for search
void read(boost::property_tree::ptree const &params);
double ctLo; // low CT value for search
double ctHi; // high CT value for search
};
struct AwbPrior {
void Read(boost::property_tree::ptree const &params);
void read(boost::property_tree::ptree const &params);
double lux; // lux level
Pwl prior; // maps CT to prior log likelihood for this lux level
};
struct AwbConfig {
AwbConfig() : default_mode(nullptr) {}
void Read(boost::property_tree::ptree const &params);
AwbConfig() : defaultMode(nullptr) {}
void read(boost::property_tree::ptree const &params);
// Only repeat the AWB calculation every "this many" frames
uint16_t frame_period;
uint16_t framePeriod;
// number of initial frames for which speed taken as 1.0 (maximum)
uint16_t startup_frames;
unsigned int convergence_frames; // approx number of frames to converge
uint16_t startupFrames;
unsigned int convergenceFrames; // approx number of frames to converge
double speed; // IIR filter speed applied to algorithm results
bool fast; // "fast" mode uses a 16x16 rather than 32x32 grid
Pwl ct_r; // function maps CT to r (= R/G)
Pwl ct_b; // function maps CT to b (= B/G)
Pwl ctR; // function maps CT to r (= R/G)
Pwl ctB; // function maps CT to b (= B/G)
// table of illuminant priors at different lux levels
std::vector<AwbPrior> priors;
// AWB "modes" (determines the search range)
std::map<std::string, AwbMode> modes;
AwbMode *default_mode; // mode used if no mode selected
AwbMode *defaultMode; // mode used if no mode selected
// minimum proportion of pixels counted within AWB region for it to be
// "useful"
double min_pixels;
double minPixels;
// minimum G value of those pixels, to be regarded a "useful"
uint16_t min_G;
uint16_t minG;
// number of AWB regions that must be "useful" in order to do the AWB
// calculation
uint32_t min_regions;
uint32_t minRegions;
// clamp on colour error term (so as not to penalise non-grey excessively)
double delta_limit;
double deltaLimit;
// step size control in coarse search
double coarse_step;
double coarseStep;
// how far to wander off CT curve towards "more purple"
double transverse_pos;
double transversePos;
// how far to wander off CT curve towards "more green"
double transverse_neg;
double transverseNeg;
// red sensitivity ratio (set to canonical sensor's R/G divided by this
// sensor's R/G)
double sensitivity_r;
double sensitivityR;
// blue sensitivity ratio (set to canonical sensor's B/G divided by this
// sensor's B/G)
double sensitivity_b;
double sensitivityB;
// The whitepoint (which we normally "aim" for) can be moved.
double whitepoint_r;
double whitepoint_b;
double whitepointR;
double whitepointB;
bool bayes; // use Bayesian algorithm
};
@ -80,22 +80,22 @@ class Awb : public AwbAlgorithm
public:
Awb(Controller *controller = NULL);
~Awb();
char const *Name() const override;
void Initialise() override;
void Read(boost::property_tree::ptree const &params) override;
char const *name() const override;
void initialise() override;
void read(boost::property_tree::ptree const &params) override;
// AWB handles "pausing" for itself.
bool IsPaused() const override;
void Pause() override;
void Resume() override;
unsigned int GetConvergenceFrames() const override;
void SetMode(std::string const &name) override;
void SetManualGains(double manual_r, double manual_b) override;
void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
void Prepare(Metadata *image_metadata) override;
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
bool isPaused() const override;
void pause() override;
void resume() override;
unsigned int getConvergenceFrames() const override;
void setMode(std::string const &name) override;
void setManualGains(double manualR, double manualB) override;
void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
void prepare(Metadata *imageMetadata) override;
void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
struct RGB {
RGB(double _R = 0, double _G = 0, double _B = 0)
: R(_R), G(_G), B(_B)
RGB(double r = 0, double g = 0, double b = 0)
: R(r), G(g), B(b)
{
}
double R, G, B;
@ -110,29 +110,29 @@ private:
bool isAutoEnabled() const;
// configuration is read-only, and available to both threads
AwbConfig config_;
std::thread async_thread_;
std::thread asyncThread_;
void asyncFunc(); // asynchronous thread function
std::mutex mutex_;
// condvar for async thread to wait on
std::condition_variable async_signal_;
std::condition_variable asyncSignal_;
// condvar for synchronous thread to wait on
std::condition_variable sync_signal_;
std::condition_variable syncSignal_;
// for sync thread to check if async thread finished (requires mutex)
bool async_finished_;
bool asyncFinished_;
// for async thread to check if it's been told to run (requires mutex)
bool async_start_;
bool asyncStart_;
// for async thread to check if it's been told to quit (requires mutex)
bool async_abort_;
bool asyncAbort_;
// The following are only for the synchronous thread to use:
// for sync thread to note its has asked async thread to run
bool async_started_;
// counts up to frame_period before restarting the async thread
int frame_phase_;
int frame_count_; // counts up to startup_frames
AwbStatus sync_results_;
AwbStatus prev_sync_results_;
std::string mode_name_;
bool asyncStarted_;
// counts up to framePeriod before restarting the async thread
int framePhase_;
int frameCount_; // counts up to startup_frames
AwbStatus syncResults_;
AwbStatus prevSyncResults_;
std::string modeName_;
// The following are for the asynchronous thread to use, though the main
// thread can set/reset them if the async thread is known to be idle:
void restartAsync(StatisticsPtr &stats, double lux);
@ -141,22 +141,22 @@ private:
StatisticsPtr statistics_;
AwbMode *mode_;
double lux_;
AwbStatus async_results_;
AwbStatus asyncResults_;
void doAwb();
void awbBayes();
void awbGrey();
void prepareStats();
double computeDelta2Sum(double gain_r, double gain_b);
double computeDelta2Sum(double gainR, double gainB);
Pwl interpolatePrior();
double coarseSearch(Pwl const &prior);
void fineSearch(double &t, double &r, double &b, Pwl const &prior);
std::vector<RGB> zones_;
std::vector<Pwl::Point> points_;
// manual r setting
double manual_r_;
double manualR_;
// manual b setting
double manual_b_;
bool first_switch_mode_; // is this the first call to SwitchMode?
double manualB_;
bool firstSwitchMode_; // is this the first call to SwitchMode?
};
static inline Awb::RGB operator+(Awb::RGB const &a, Awb::RGB const &b)

View file

@ -26,38 +26,38 @@ BlackLevel::BlackLevel(Controller *controller)
{
}
char const *BlackLevel::Name() const
char const *BlackLevel::name() const
{
return NAME;
}
void BlackLevel::Read(boost::property_tree::ptree const &params)
void BlackLevel::read(boost::property_tree::ptree const &params)
{
uint16_t black_level = params.get<uint16_t>(
uint16_t blackLevel = params.get<uint16_t>(
"black_level", 4096); // 64 in 10 bits scaled to 16 bits
black_level_r_ = params.get<uint16_t>("black_level_r", black_level);
black_level_g_ = params.get<uint16_t>("black_level_g", black_level);
black_level_b_ = params.get<uint16_t>("black_level_b", black_level);
blackLevelR_ = params.get<uint16_t>("black_level_r", blackLevel);
blackLevelG_ = params.get<uint16_t>("black_level_g", blackLevel);
blackLevelB_ = params.get<uint16_t>("black_level_b", blackLevel);
LOG(RPiBlackLevel, Debug)
<< " Read black levels red " << black_level_r_
<< " green " << black_level_g_
<< " blue " << black_level_b_;
<< " Read black levels red " << blackLevelR_
<< " green " << blackLevelG_
<< " blue " << blackLevelB_;
}
void BlackLevel::Prepare(Metadata *image_metadata)
void BlackLevel::prepare(Metadata *imageMetadata)
{
// Possibly we should think about doing this in a switch_mode or
// Possibly we should think about doing this in a switchMode or
// something?
struct BlackLevelStatus status;
status.black_level_r = black_level_r_;
status.black_level_g = black_level_g_;
status.black_level_b = black_level_b_;
image_metadata->Set("black_level.status", status);
status.blackLevelR = blackLevelR_;
status.blackLevelG = blackLevelG_;
status.blackLevelB = blackLevelB_;
imageMetadata->set("black_level.status", status);
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return new BlackLevel(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -17,14 +17,14 @@ class BlackLevel : public Algorithm
{
public:
BlackLevel(Controller *controller);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
void Prepare(Metadata *image_metadata) override;
char const *name() const override;
void read(boost::property_tree::ptree const &params) override;
void prepare(Metadata *imageMetadata) override;
private:
double black_level_r_;
double black_level_g_;
double black_level_b_;
double blackLevelR_;
double blackLevelG_;
double blackLevelB_;
};
} // namespace RPiController

View file

@ -37,7 +37,7 @@ Matrix::Matrix(double m0, double m1, double m2, double m3, double m4, double m5,
m[0][0] = m0, m[0][1] = m1, m[0][2] = m2, m[1][0] = m3, m[1][1] = m4,
m[1][2] = m5, m[2][0] = m6, m[2][1] = m7, m[2][2] = m8;
}
void Matrix::Read(boost::property_tree::ptree const &params)
void Matrix::read(boost::property_tree::ptree const &params)
{
double *ptr = (double *)m;
int n = 0;
@ -53,47 +53,49 @@ void Matrix::Read(boost::property_tree::ptree const &params)
Ccm::Ccm(Controller *controller)
: CcmAlgorithm(controller), saturation_(1.0) {}
char const *Ccm::Name() const
char const *Ccm::name() const
{
return NAME;
}
void Ccm::Read(boost::property_tree::ptree const &params)
void Ccm::read(boost::property_tree::ptree const &params)
{
if (params.get_child_optional("saturation"))
config_.saturation.Read(params.get_child("saturation"));
config_.saturation.read(params.get_child("saturation"));
for (auto &p : params.get_child("ccms")) {
CtCcm ct_ccm;
ct_ccm.ct = p.second.get<double>("ct");
ct_ccm.ccm.Read(p.second.get_child("ccm"));
CtCcm ctCcm;
ctCcm.ct = p.second.get<double>("ct");
ctCcm.ccm.read(p.second.get_child("ccm"));
if (!config_.ccms.empty() &&
ct_ccm.ct <= config_.ccms.back().ct)
ctCcm.ct <= config_.ccms.back().ct)
throw std::runtime_error(
"Ccm: CCM not in increasing colour temperature order");
config_.ccms.push_back(std::move(ct_ccm));
config_.ccms.push_back(std::move(ctCcm));
}
if (config_.ccms.empty())
throw std::runtime_error("Ccm: no CCMs specified");
}
void Ccm::SetSaturation(double saturation)
void Ccm::setSaturation(double saturation)
{
saturation_ = saturation;
}
void Ccm::Initialise() {}
void Ccm::initialise()
{
}
template<typename T>
static bool get_locked(Metadata *metadata, std::string const &tag, T &value)
static bool getLocked(Metadata *metadata, std::string const &tag, T &value)
{
T *ptr = metadata->GetLocked<T>(tag);
T *ptr = metadata->getLocked<T>(tag);
if (ptr == nullptr)
return false;
value = *ptr;
return true;
}
Matrix calculate_ccm(std::vector<CtCcm> const &ccms, double ct)
Matrix calculateCcm(std::vector<CtCcm> const &ccms, double ct)
{
if (ct <= ccms.front().ct)
return ccms.front().ccm;
@ -109,7 +111,7 @@ Matrix calculate_ccm(std::vector<CtCcm> const &ccms, double ct)
}
}
Matrix apply_saturation(Matrix const &ccm, double saturation)
Matrix applySaturation(Matrix const &ccm, double saturation)
{
Matrix RGB2Y(0.299, 0.587, 0.114, -0.169, -0.331, 0.500, 0.500, -0.419,
-0.081);
@ -119,51 +121,51 @@ Matrix apply_saturation(Matrix const &ccm, double saturation)
return Y2RGB * S * RGB2Y * ccm;
}
void Ccm::Prepare(Metadata *image_metadata)
void Ccm::prepare(Metadata *imageMetadata)
{
bool awb_ok = false, lux_ok = false;
bool awbOk = false, luxOk = false;
struct AwbStatus awb = {};
awb.temperature_K = 4000; // in case no metadata
awb.temperatureK = 4000; // in case no metadata
struct LuxStatus lux = {};
lux.lux = 400; // in case no metadata
{
// grab mutex just once to get everything
std::lock_guard<Metadata> lock(*image_metadata);
awb_ok = get_locked(image_metadata, "awb.status", awb);
lux_ok = get_locked(image_metadata, "lux.status", lux);
std::lock_guard<Metadata> lock(*imageMetadata);
awbOk = getLocked(imageMetadata, "awb.status", awb);
luxOk = getLocked(imageMetadata, "lux.status", lux);
}
if (!awb_ok)
if (!awbOk)
LOG(RPiCcm, Warning) << "no colour temperature found";
if (!lux_ok)
if (!luxOk)
LOG(RPiCcm, Warning) << "no lux value found";
Matrix ccm = calculate_ccm(config_.ccms, awb.temperature_K);
Matrix ccm = calculateCcm(config_.ccms, awb.temperatureK);
double saturation = saturation_;
struct CcmStatus ccm_status;
ccm_status.saturation = saturation;
if (!config_.saturation.Empty())
saturation *= config_.saturation.Eval(
config_.saturation.Domain().Clip(lux.lux));
ccm = apply_saturation(ccm, saturation);
struct CcmStatus ccmStatus;
ccmStatus.saturation = saturation;
if (!config_.saturation.empty())
saturation *= config_.saturation.eval(
config_.saturation.domain().clip(lux.lux));
ccm = applySaturation(ccm, saturation);
for (int j = 0; j < 3; j++)
for (int i = 0; i < 3; i++)
ccm_status.matrix[j * 3 + i] =
ccmStatus.matrix[j * 3 + i] =
std::max(-8.0, std::min(7.9999, ccm.m[j][i]));
LOG(RPiCcm, Debug)
<< "colour temperature " << awb.temperature_K << "K";
<< "colour temperature " << awb.temperatureK << "K";
LOG(RPiCcm, Debug)
<< "CCM: " << ccm_status.matrix[0] << " " << ccm_status.matrix[1]
<< " " << ccm_status.matrix[2] << " "
<< ccm_status.matrix[3] << " " << ccm_status.matrix[4]
<< " " << ccm_status.matrix[5] << " "
<< ccm_status.matrix[6] << " " << ccm_status.matrix[7]
<< " " << ccm_status.matrix[8];
image_metadata->Set("ccm.status", ccm_status);
<< "CCM: " << ccmStatus.matrix[0] << " " << ccmStatus.matrix[1]
<< " " << ccmStatus.matrix[2] << " "
<< ccmStatus.matrix[3] << " " << ccmStatus.matrix[4]
<< " " << ccmStatus.matrix[5] << " "
<< ccmStatus.matrix[6] << " " << ccmStatus.matrix[7]
<< " " << ccmStatus.matrix[8];
imageMetadata->set("ccm.status", ccmStatus);
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Ccm(controller);
;
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -20,7 +20,7 @@ struct Matrix {
double m6, double m7, double m8);
Matrix();
double m[3][3];
void Read(boost::property_tree::ptree const &params);
void read(boost::property_tree::ptree const &params);
};
static inline Matrix operator*(double d, Matrix const &m)
{
@ -61,11 +61,11 @@ class Ccm : public CcmAlgorithm
{
public:
Ccm(Controller *controller = NULL);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
void SetSaturation(double saturation) override;
void Initialise() override;
void Prepare(Metadata *image_metadata) override;
char const *name() const override;
void read(boost::property_tree::ptree const &params) override;
void setSaturation(double saturation) override;
void initialise() override;
void prepare(Metadata *imageMetadata) override;
private:
CcmConfig config_;

View file

@ -31,40 +31,40 @@ Contrast::Contrast(Controller *controller)
{
}
char const *Contrast::Name() const
char const *Contrast::name() const
{
return NAME;
}
void Contrast::Read(boost::property_tree::ptree const &params)
void Contrast::read(boost::property_tree::ptree const &params)
{
// enable adaptive enhancement by default
config_.ce_enable = params.get<int>("ce_enable", 1);
config_.ceEnable = params.get<int>("ce_enable", 1);
// the point near the bottom of the histogram to move
config_.lo_histogram = params.get<double>("lo_histogram", 0.01);
config_.loHistogram = params.get<double>("lo_histogram", 0.01);
// where in the range to try and move it to
config_.lo_level = params.get<double>("lo_level", 0.015);
config_.loLevel = params.get<double>("lo_level", 0.015);
// but don't move by more than this
config_.lo_max = params.get<double>("lo_max", 500);
config_.loMax = params.get<double>("lo_max", 500);
// equivalent values for the top of the histogram...
config_.hi_histogram = params.get<double>("hi_histogram", 0.95);
config_.hi_level = params.get<double>("hi_level", 0.95);
config_.hi_max = params.get<double>("hi_max", 2000);
config_.gamma_curve.Read(params.get_child("gamma_curve"));
config_.hiHistogram = params.get<double>("hi_histogram", 0.95);
config_.hiLevel = params.get<double>("hi_level", 0.95);
config_.hiMax = params.get<double>("hi_max", 2000);
config_.gammaCurve.read(params.get_child("gamma_curve"));
}
void Contrast::SetBrightness(double brightness)
void Contrast::setBrightness(double brightness)
{
brightness_ = brightness;
}
void Contrast::SetContrast(double contrast)
void Contrast::setContrast(double contrast)
{
contrast_ = contrast;
}
static void fill_in_status(ContrastStatus &status, double brightness,
double contrast, Pwl &gamma_curve)
static void fillInStatus(ContrastStatus &status, double brightness,
double contrast, Pwl &gammaCurve)
{
status.brightness = brightness;
status.contrast = contrast;
@ -73,104 +73,100 @@ static void fill_in_status(ContrastStatus &status, double brightness,
: (i < 24 ? (i - 16) * 2048 + 16384
: (i - 24) * 4096 + 32768);
status.points[i].x = x;
status.points[i].y = std::min(65535.0, gamma_curve.Eval(x));
status.points[i].y = std::min(65535.0, gammaCurve.eval(x));
}
status.points[CONTRAST_NUM_POINTS - 1].x = 65535;
status.points[CONTRAST_NUM_POINTS - 1].y = 65535;
}
void Contrast::Initialise()
void Contrast::initialise()
{
// Fill in some default values as Prepare will run before Process gets
// called.
fill_in_status(status_, brightness_, contrast_, config_.gamma_curve);
fillInStatus(status_, brightness_, contrast_, config_.gammaCurve);
}
void Contrast::Prepare(Metadata *image_metadata)
void Contrast::prepare(Metadata *imageMetadata)
{
std::unique_lock<std::mutex> lock(mutex_);
image_metadata->Set("contrast.status", status_);
imageMetadata->set("contrast.status", status_);
}
Pwl compute_stretch_curve(Histogram const &histogram,
ContrastConfig const &config)
Pwl computeStretchCurve(Histogram const &histogram,
ContrastConfig const &config)
{
Pwl enhance;
enhance.Append(0, 0);
enhance.append(0, 0);
// If the start of the histogram is rather empty, try to pull it down a
// bit.
double hist_lo = histogram.Quantile(config.lo_histogram) *
(65536 / NUM_HISTOGRAM_BINS);
double level_lo = config.lo_level * 65536;
double histLo = histogram.quantile(config.loHistogram) *
(65536 / NUM_HISTOGRAM_BINS);
double levelLo = config.loLevel * 65536;
LOG(RPiContrast, Debug)
<< "Move histogram point " << hist_lo << " to " << level_lo;
hist_lo = std::max(
level_lo,
std::min(65535.0, std::min(hist_lo, level_lo + config.lo_max)));
<< "Move histogram point " << histLo << " to " << levelLo;
histLo = std::max(levelLo,
std::min(65535.0, std::min(histLo, levelLo + config.loMax)));
LOG(RPiContrast, Debug)
<< "Final values " << hist_lo << " -> " << level_lo;
enhance.Append(hist_lo, level_lo);
<< "Final values " << histLo << " -> " << levelLo;
enhance.append(histLo, levelLo);
// Keep the mid-point (median) in the same place, though, to limit the
// apparent amount of global brightness shift.
double mid = histogram.Quantile(0.5) * (65536 / NUM_HISTOGRAM_BINS);
enhance.Append(mid, mid);
double mid = histogram.quantile(0.5) * (65536 / NUM_HISTOGRAM_BINS);
enhance.append(mid, mid);
// If the top to the histogram is empty, try to pull the pixel values
// there up.
double hist_hi = histogram.Quantile(config.hi_histogram) *
(65536 / NUM_HISTOGRAM_BINS);
double level_hi = config.hi_level * 65536;
double histHi = histogram.quantile(config.hiHistogram) *
(65536 / NUM_HISTOGRAM_BINS);
double levelHi = config.hiLevel * 65536;
LOG(RPiContrast, Debug)
<< "Move histogram point " << hist_hi << " to " << level_hi;
hist_hi = std::min(
level_hi,
std::max(0.0, std::max(hist_hi, level_hi - config.hi_max)));
<< "Move histogram point " << histHi << " to " << levelHi;
histHi = std::min(levelHi,
std::max(0.0, std::max(histHi, levelHi - config.hiMax)));
LOG(RPiContrast, Debug)
<< "Final values " << hist_hi << " -> " << level_hi;
enhance.Append(hist_hi, level_hi);
enhance.Append(65535, 65535);
<< "Final values " << histHi << " -> " << levelHi;
enhance.append(histHi, levelHi);
enhance.append(65535, 65535);
return enhance;
}
Pwl apply_manual_contrast(Pwl const &gamma_curve, double brightness,
double contrast)
Pwl applyManualContrast(Pwl const &gammaCurve, double brightness,
double contrast)
{
Pwl new_gamma_curve;
Pwl newGammaCurve;
LOG(RPiContrast, Debug)
<< "Manual brightness " << brightness << " contrast " << contrast;
gamma_curve.Map([&](double x, double y) {
new_gamma_curve.Append(
gammaCurve.map([&](double x, double y) {
newGammaCurve.append(
x, std::max(0.0, std::min(65535.0,
(y - 32768) * contrast +
32768 + brightness)));
});
return new_gamma_curve;
return newGammaCurve;
}
void Contrast::Process(StatisticsPtr &stats,
[[maybe_unused]] Metadata *image_metadata)
void Contrast::process(StatisticsPtr &stats,
[[maybe_unused]] Metadata *imageMetadata)
{
Histogram histogram(stats->hist[0].g_hist, NUM_HISTOGRAM_BINS);
// We look at the histogram and adjust the gamma curve in the following
// ways: 1. Adjust the gamma curve so as to pull the start of the
// histogram down, and possibly push the end up.
Pwl gamma_curve = config_.gamma_curve;
if (config_.ce_enable) {
if (config_.lo_max != 0 || config_.hi_max != 0)
gamma_curve = compute_stretch_curve(histogram, config_)
.Compose(gamma_curve);
Pwl gammaCurve = config_.gammaCurve;
if (config_.ceEnable) {
if (config_.loMax != 0 || config_.hiMax != 0)
gammaCurve = computeStretchCurve(histogram, config_).compose(gammaCurve);
// We could apply other adjustments (e.g. partial equalisation)
// based on the histogram...?
}
// 2. Finally apply any manually selected brightness/contrast
// adjustment.
if (brightness_ != 0 || contrast_ != 1.0)
gamma_curve = apply_manual_contrast(gamma_curve, brightness_,
contrast_);
gammaCurve = applyManualContrast(gammaCurve, brightness_, contrast_);
// And fill in the status for output. Use more points towards the bottom
// of the curve.
ContrastStatus status;
fill_in_status(status, brightness_, contrast_, gamma_curve);
fillInStatus(status, brightness_, contrast_, gammaCurve);
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
@ -178,8 +174,8 @@ void Contrast::Process(StatisticsPtr &stats,
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Contrast(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -17,27 +17,27 @@ namespace RPiController {
// Back End AWB.
struct ContrastConfig {
bool ce_enable;
double lo_histogram;
double lo_level;
double lo_max;
double hi_histogram;
double hi_level;
double hi_max;
Pwl gamma_curve;
bool ceEnable;
double loHistogram;
double loLevel;
double loMax;
double hiHistogram;
double hiLevel;
double hiMax;
Pwl gammaCurve;
};
class Contrast : public ContrastAlgorithm
{
public:
Contrast(Controller *controller = NULL);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
void SetBrightness(double brightness) override;
void SetContrast(double contrast) override;
void Initialise() override;
void Prepare(Metadata *image_metadata) override;
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
char const *name() const override;
void read(boost::property_tree::ptree const &params) override;
void setBrightness(double brightness) override;
void setContrast(double contrast) override;
void initialise() override;
void prepare(Metadata *imageMetadata) override;
void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
private:
ContrastConfig config_;

View file

@ -24,30 +24,30 @@ Dpc::Dpc(Controller *controller)
{
}
char const *Dpc::Name() const
char const *Dpc::name() const
{
return NAME;
}
void Dpc::Read(boost::property_tree::ptree const &params)
void Dpc::read(boost::property_tree::ptree const &params)
{
config_.strength = params.get<int>("strength", 1);
if (config_.strength < 0 || config_.strength > 2)
throw std::runtime_error("Dpc: bad strength value");
}
void Dpc::Prepare(Metadata *image_metadata)
void Dpc::prepare(Metadata *imageMetadata)
{
DpcStatus dpc_status = {};
DpcStatus dpcStatus = {};
// Should we vary this with lux level or analogue gain? TBD.
dpc_status.strength = config_.strength;
LOG(RPiDpc, Debug) << "strength " << dpc_status.strength;
image_metadata->Set("dpc.status", dpc_status);
dpcStatus.strength = config_.strength;
LOG(RPiDpc, Debug) << "strength " << dpcStatus.strength;
imageMetadata->set("dpc.status", dpcStatus);
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Dpc(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -21,9 +21,9 @@ class Dpc : public Algorithm
{
public:
Dpc(Controller *controller);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
void Prepare(Metadata *image_metadata) override;
char const *name() const override;
void read(boost::property_tree::ptree const &params) override;
void prepare(Metadata *imageMetadata) override;
private:
DpcConfig config_;

View file

@ -23,28 +23,28 @@ Focus::Focus(Controller *controller)
{
}
char const *Focus::Name() const
char const *Focus::name() const
{
return NAME;
}
void Focus::Process(StatisticsPtr &stats, Metadata *image_metadata)
void Focus::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
FocusStatus status;
unsigned int i;
for (i = 0; i < FOCUS_REGIONS; i++)
status.focus_measures[i] = stats->focus_stats[i].contrast_val[1][1] / 1000;
status.focusMeasures[i] = stats->focus_stats[i].contrast_val[1][1] / 1000;
status.num = i;
image_metadata->Set("focus.status", status);
imageMetadata->set("focus.status", status);
LOG(RPiFocus, Debug)
<< "Focus contrast measure: "
<< (status.focus_measures[5] + status.focus_measures[6]) / 10;
<< (status.focusMeasures[5] + status.focusMeasures[6]) / 10;
}
/* Register algorithm with the system. */
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return new Focus(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -21,8 +21,8 @@ class Focus : public Algorithm
{
public:
Focus(Controller *controller);
char const *Name() const override;
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
char const *name() const override;
void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
};
} /* namespace RPiController */

View file

@ -28,54 +28,52 @@ Geq::Geq(Controller *controller)
{
}
char const *Geq::Name() const
char const *Geq::name() const
{
return NAME;
}
void Geq::Read(boost::property_tree::ptree const &params)
void Geq::read(boost::property_tree::ptree const &params)
{
config_.offset = params.get<uint16_t>("offset", 0);
config_.slope = params.get<double>("slope", 0.0);
if (config_.slope < 0.0 || config_.slope >= 1.0)
throw std::runtime_error("Geq: bad slope value");
if (params.get_child_optional("strength"))
config_.strength.Read(params.get_child("strength"));
config_.strength.read(params.get_child("strength"));
}
void Geq::Prepare(Metadata *image_metadata)
void Geq::prepare(Metadata *imageMetadata)
{
LuxStatus lux_status = {};
lux_status.lux = 400;
if (image_metadata->Get("lux.status", lux_status))
LuxStatus luxStatus = {};
luxStatus.lux = 400;
if (imageMetadata->get("lux.status", luxStatus))
LOG(RPiGeq, Warning) << "no lux data found";
DeviceStatus device_status;
device_status.analogue_gain = 1.0; // in case not found
if (image_metadata->Get("device.status", device_status))
DeviceStatus deviceStatus;
deviceStatus.analogueGain = 1.0; // in case not found
if (imageMetadata->get("device.status", deviceStatus))
LOG(RPiGeq, Warning)
<< "no device metadata - use analogue gain of 1x";
GeqStatus geq_status = {};
double strength =
config_.strength.Empty()
GeqStatus geqStatus = {};
double strength = config_.strength.empty()
? 1.0
: config_.strength.Eval(config_.strength.Domain().Clip(
lux_status.lux));
strength *= device_status.analogue_gain;
: config_.strength.eval(config_.strength.domain().clip(luxStatus.lux));
strength *= deviceStatus.analogueGain;
double offset = config_.offset * strength;
double slope = config_.slope * strength;
geq_status.offset = std::min(65535.0, std::max(0.0, offset));
geq_status.slope = std::min(.99999, std::max(0.0, slope));
geqStatus.offset = std::min(65535.0, std::max(0.0, offset));
geqStatus.slope = std::min(.99999, std::max(0.0, slope));
LOG(RPiGeq, Debug)
<< "offset " << geq_status.offset << " slope "
<< geq_status.slope << " (analogue gain "
<< device_status.analogue_gain << " lux "
<< lux_status.lux << ")";
image_metadata->Set("geq.status", geq_status);
<< "offset " << geqStatus.offset << " slope "
<< geqStatus.slope << " (analogue gain "
<< deviceStatus.analogueGain << " lux "
<< luxStatus.lux << ")";
imageMetadata->set("geq.status", geqStatus);
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Geq(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -23,9 +23,9 @@ class Geq : public Algorithm
{
public:
Geq(Controller *controller);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
void Prepare(Metadata *image_metadata) override;
char const *name() const override;
void read(boost::property_tree::ptree const &params) override;
void prepare(Metadata *imageMetadata) override;
private:
GeqConfig config_;

View file

@ -31,74 +31,74 @@ Lux::Lux(Controller *controller)
status_.lux = 400;
}
char const *Lux::Name() const
char const *Lux::name() const
{
return NAME;
}
void Lux::Read(boost::property_tree::ptree const &params)
void Lux::read(boost::property_tree::ptree const &params)
{
reference_shutter_speed_ =
referenceShutterSpeed_ =
params.get<double>("reference_shutter_speed") * 1.0us;
reference_gain_ = params.get<double>("reference_gain");
reference_aperture_ = params.get<double>("reference_aperture", 1.0);
reference_Y_ = params.get<double>("reference_Y");
reference_lux_ = params.get<double>("reference_lux");
current_aperture_ = reference_aperture_;
referenceGain_ = params.get<double>("reference_gain");
referenceAperture_ = params.get<double>("reference_aperture", 1.0);
referenceY_ = params.get<double>("reference_Y");
referenceLux_ = params.get<double>("reference_lux");
currentAperture_ = referenceAperture_;
}
void Lux::SetCurrentAperture(double aperture)
void Lux::setCurrentAperture(double aperture)
{
current_aperture_ = aperture;
currentAperture_ = aperture;
}
void Lux::Prepare(Metadata *image_metadata)
void Lux::prepare(Metadata *imageMetadata)
{
std::unique_lock<std::mutex> lock(mutex_);
image_metadata->Set("lux.status", status_);
imageMetadata->set("lux.status", status_);
}
void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
void Lux::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
DeviceStatus device_status;
if (image_metadata->Get("device.status", device_status) == 0) {
double current_gain = device_status.analogue_gain;
double current_aperture = device_status.aperture.value_or(current_aperture_);
DeviceStatus deviceStatus;
if (imageMetadata->get("device.status", deviceStatus) == 0) {
double currentGain = deviceStatus.analogueGain;
double currentAperture = deviceStatus.aperture.value_or(currentAperture_);
uint64_t sum = 0;
uint32_t num = 0;
uint32_t *bin = stats->hist[0].g_hist;
const int num_bins = sizeof(stats->hist[0].g_hist) /
sizeof(stats->hist[0].g_hist[0]);
for (int i = 0; i < num_bins; i++)
const int numBins = sizeof(stats->hist[0].g_hist) /
sizeof(stats->hist[0].g_hist[0]);
for (int i = 0; i < numBins; i++)
sum += bin[i] * (uint64_t)i, num += bin[i];
// add .5 to reflect the mid-points of bins
double current_Y = sum / (double)num + .5;
double gain_ratio = reference_gain_ / current_gain;
double shutter_speed_ratio =
reference_shutter_speed_ / device_status.shutter_speed;
double aperture_ratio = reference_aperture_ / current_aperture;
double Y_ratio = current_Y * (65536 / num_bins) / reference_Y_;
double estimated_lux = shutter_speed_ratio * gain_ratio *
aperture_ratio * aperture_ratio *
Y_ratio * reference_lux_;
double currentY = sum / (double)num + .5;
double gainRatio = referenceGain_ / currentGain;
double shutterSpeedRatio =
referenceShutterSpeed_ / deviceStatus.shutterSpeed;
double apertureRatio = referenceAperture_ / currentAperture;
double yRatio = currentY * (65536 / numBins) / referenceY_;
double estimatedLux = shutterSpeedRatio * gainRatio *
apertureRatio * apertureRatio *
yRatio * referenceLux_;
LuxStatus status;
status.lux = estimated_lux;
status.aperture = current_aperture;
LOG(RPiLux, Debug) << ": estimated lux " << estimated_lux;
status.lux = estimatedLux;
status.aperture = currentAperture;
LOG(RPiLux, Debug) << ": estimated lux " << estimatedLux;
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
}
// Overwrite the metadata here as well, so that downstream
// algorithms get the latest value.
image_metadata->Set("lux.status", status);
imageMetadata->set("lux.status", status);
} else
LOG(RPiLux, Warning) << ": no device metadata";
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Lux(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -21,21 +21,21 @@ class Lux : public Algorithm
{
public:
Lux(Controller *controller);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
void Prepare(Metadata *image_metadata) override;
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
void SetCurrentAperture(double aperture);
char const *name() const override;
void read(boost::property_tree::ptree const &params) override;
void prepare(Metadata *imageMetadata) override;
void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
void setCurrentAperture(double aperture);
private:
// These values define the conditions of the reference image, against
// which we compare the new image.
libcamera::utils::Duration reference_shutter_speed_;
double reference_gain_;
double reference_aperture_; // units of 1/f
double reference_Y_; // out of 65536
double reference_lux_;
double current_aperture_;
libcamera::utils::Duration referenceShutterSpeed_;
double referenceGain_;
double referenceAperture_; // units of 1/f
double referenceY_; // out of 65536
double referenceLux_;
double currentAperture_;
LuxStatus status_;
std::mutex mutex_;
};

View file

@ -22,55 +22,55 @@ LOG_DEFINE_CATEGORY(RPiNoise)
#define NAME "rpi.noise"
Noise::Noise(Controller *controller)
: Algorithm(controller), mode_factor_(1.0)
: Algorithm(controller), modeFactor_(1.0)
{
}
char const *Noise::Name() const
char const *Noise::name() const
{
return NAME;
}
void Noise::SwitchMode(CameraMode const &camera_mode,
void Noise::switchMode(CameraMode const &cameraMode,
[[maybe_unused]] Metadata *metadata)
{
// For example, we would expect a 2x2 binned mode to have a "noise
// factor" of sqrt(2x2) = 2. (can't be less than one, right?)
mode_factor_ = std::max(1.0, camera_mode.noise_factor);
modeFactor_ = std::max(1.0, cameraMode.noiseFactor);
}
void Noise::Read(boost::property_tree::ptree const &params)
void Noise::read(boost::property_tree::ptree const &params)
{
reference_constant_ = params.get<double>("reference_constant");
reference_slope_ = params.get<double>("reference_slope");
referenceConstant_ = params.get<double>("reference_constant");
referenceSlope_ = params.get<double>("reference_slope");
}
void Noise::Prepare(Metadata *image_metadata)
void Noise::prepare(Metadata *imageMetadata)
{
struct DeviceStatus device_status;
device_status.analogue_gain = 1.0; // keep compiler calm
if (image_metadata->Get("device.status", device_status) == 0) {
struct DeviceStatus deviceStatus;
deviceStatus.analogueGain = 1.0; // keep compiler calm
if (imageMetadata->get("device.status", deviceStatus) == 0) {
// There is a slight question as to exactly how the noise
// profile, specifically the constant part of it, scales. For
// now we assume it all scales the same, and we'll revisit this
// if it proves substantially wrong. NOTE: we may also want to
// make some adjustments based on the camera mode (such as
// binning), if we knew how to discover it...
double factor = sqrt(device_status.analogue_gain) / mode_factor_;
double factor = sqrt(deviceStatus.analogueGain) / modeFactor_;
struct NoiseStatus status;
status.noise_constant = reference_constant_ * factor;
status.noise_slope = reference_slope_ * factor;
image_metadata->Set("noise.status", status);
status.noiseConstant = referenceConstant_ * factor;
status.noiseSlope = referenceSlope_ * factor;
imageMetadata->set("noise.status", status);
LOG(RPiNoise, Debug)
<< "constant " << status.noise_constant
<< " slope " << status.noise_slope;
<< "constant " << status.noiseConstant
<< " slope " << status.noiseSlope;
} else
LOG(RPiNoise, Warning) << " no metadata";
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return new Noise(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -17,16 +17,16 @@ class Noise : public Algorithm
{
public:
Noise(Controller *controller);
char const *Name() const override;
void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
void Read(boost::property_tree::ptree const &params) override;
void Prepare(Metadata *image_metadata) override;
char const *name() const override;
void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
void read(boost::property_tree::ptree const &params) override;
void prepare(Metadata *imageMetadata) override;
private:
// the noise profile for analogue gain of 1.0
double reference_constant_;
double reference_slope_;
double mode_factor_;
double referenceConstant_;
double referenceSlope_;
double modeFactor_;
};
} // namespace RPiController

View file

@ -27,49 +27,51 @@ Sdn::Sdn(Controller *controller)
{
}
char const *Sdn::Name() const
char const *Sdn::name() const
{
return NAME;
}
void Sdn::Read(boost::property_tree::ptree const &params)
void Sdn::read(boost::property_tree::ptree const &params)
{
deviation_ = params.get<double>("deviation", 3.2);
strength_ = params.get<double>("strength", 0.75);
}
void Sdn::Initialise() {}
void Sdn::Prepare(Metadata *image_metadata)
void Sdn::initialise()
{
struct NoiseStatus noise_status = {};
noise_status.noise_slope = 3.0; // in case no metadata
if (image_metadata->Get("noise.status", noise_status) != 0)
}
void Sdn::prepare(Metadata *imageMetadata)
{
struct NoiseStatus noiseStatus = {};
noiseStatus.noiseSlope = 3.0; // in case no metadata
if (imageMetadata->get("noise.status", noiseStatus) != 0)
LOG(RPiSdn, Warning) << "no noise profile found";
LOG(RPiSdn, Debug)
<< "Noise profile: constant " << noise_status.noise_constant
<< " slope " << noise_status.noise_slope;
<< "Noise profile: constant " << noiseStatus.noiseConstant
<< " slope " << noiseStatus.noiseSlope;
struct DenoiseStatus status;
status.noise_constant = noise_status.noise_constant * deviation_;
status.noise_slope = noise_status.noise_slope * deviation_;
status.noiseConstant = noiseStatus.noiseConstant * deviation_;
status.noiseSlope = noiseStatus.noiseSlope * deviation_;
status.strength = strength_;
status.mode = static_cast<std::underlying_type_t<DenoiseMode>>(mode_);
image_metadata->Set("denoise.status", status);
imageMetadata->set("denoise.status", status);
LOG(RPiSdn, Debug)
<< "programmed constant " << status.noise_constant
<< " slope " << status.noise_slope
<< "programmed constant " << status.noiseConstant
<< " slope " << status.noiseSlope
<< " strength " << status.strength;
}
void Sdn::SetMode(DenoiseMode mode)
void Sdn::setMode(DenoiseMode mode)
{
// We only distinguish between off and all other modes.
mode_ = mode;
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Sdn(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -17,11 +17,11 @@ class Sdn : public DenoiseAlgorithm
{
public:
Sdn(Controller *controller = NULL);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
void Initialise() override;
void Prepare(Metadata *image_metadata) override;
void SetMode(DenoiseMode mode) override;
char const *name() const override;
void read(boost::property_tree::ptree const &params) override;
void initialise() override;
void prepare(Metadata *imageMetadata) override;
void setMode(DenoiseMode mode) override;
private:
double deviation_;

View file

@ -21,23 +21,23 @@ LOG_DEFINE_CATEGORY(RPiSharpen)
#define NAME "rpi.sharpen"
Sharpen::Sharpen(Controller *controller)
: SharpenAlgorithm(controller), user_strength_(1.0)
: SharpenAlgorithm(controller), userStrength_(1.0)
{
}
char const *Sharpen::Name() const
char const *Sharpen::name() const
{
return NAME;
}
void Sharpen::SwitchMode(CameraMode const &camera_mode,
void Sharpen::switchMode(CameraMode const &cameraMode,
[[maybe_unused]] Metadata *metadata)
{
// can't be less than one, right?
mode_factor_ = std::max(1.0, camera_mode.noise_factor);
modeFactor_ = std::max(1.0, cameraMode.noiseFactor);
}
void Sharpen::Read(boost::property_tree::ptree const &params)
void Sharpen::read(boost::property_tree::ptree const &params)
{
threshold_ = params.get<double>("threshold", 1.0);
strength_ = params.get<double>("strength", 1.0);
@ -48,38 +48,38 @@ void Sharpen::Read(boost::property_tree::ptree const &params)
<< " limit " << limit_;
}
void Sharpen::SetStrength(double strength)
void Sharpen::setStrength(double strength)
{
// Note that this function is how an application sets the overall
// sharpening "strength". We call this the "user strength" field
// as there already is a strength_ field - being an internal gain
// parameter that gets passed to the ISP control code. Negative
// values are not allowed - coerce them to zero (no sharpening).
user_strength_ = std::max(0.0, strength);
userStrength_ = std::max(0.0, strength);
}
void Sharpen::Prepare(Metadata *image_metadata)
void Sharpen::prepare(Metadata *imageMetadata)
{
// The user_strength_ affects the algorithm's internal gain directly, but
// The userStrength_ affects the algorithm's internal gain directly, but
// we adjust the limit and threshold less aggressively. Using a sqrt
// function is an arbitrary but gentle way of accomplishing this.
double user_strength_sqrt = sqrt(user_strength_);
double userStrengthSqrt = sqrt(userStrength_);
struct SharpenStatus status;
// Binned modes seem to need the sharpening toned down with this
// pipeline, thus we use the mode_factor here. Also avoid
// divide-by-zero with the user_strength_sqrt.
status.threshold = threshold_ * mode_factor_ /
std::max(0.01, user_strength_sqrt);
status.strength = strength_ / mode_factor_ * user_strength_;
status.limit = limit_ / mode_factor_ * user_strength_sqrt;
// Finally, report any application-supplied parameters that were used.
status.user_strength = user_strength_;
image_metadata->Set("sharpen.status", status);
// pipeline, thus we use the modeFactor_ here. Also avoid
// divide-by-zero with the userStrengthSqrt.
status.threshold = threshold_ * modeFactor_ /
std::max(0.01, userStrengthSqrt);
status.strength = strength_ / modeFactor_ * userStrength_;
status.limit = limit_ / modeFactor_ * userStrengthSqrt;
/* Finally, report any application-supplied parameters that were used. */
status.userStrength = userStrength_;
imageMetadata->set("sharpen.status", status);
}
// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
static Algorithm *create(Controller *controller)
{
return new Sharpen(controller);
}
static RegisterAlgorithm reg(NAME, &Create);
static RegisterAlgorithm reg(NAME, &create);

View file

@ -17,18 +17,18 @@ class Sharpen : public SharpenAlgorithm
{
public:
Sharpen(Controller *controller);
char const *Name() const override;
void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
void Read(boost::property_tree::ptree const &params) override;
void SetStrength(double strength) override;
void Prepare(Metadata *image_metadata) override;
char const *name() const override;
void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
void read(boost::property_tree::ptree const &params) override;
void setStrength(double strength) override;
void prepare(Metadata *imageMetadata) override;
private:
double threshold_;
double strength_;
double limit_;
double mode_factor_;
double user_strength_;
double modeFactor_;
double userStrength_;
};
} // namespace RPiController

View file

@ -15,7 +15,7 @@ class SharpenAlgorithm : public Algorithm
public:
SharpenAlgorithm(Controller *controller) : Algorithm(controller) {}
// A sharpness control algorithm must provide the following:
virtual void SetStrength(double strength) = 0;
virtual void setStrength(double strength) = 0;
};
} // namespace RPiController

View file

@ -20,7 +20,7 @@ struct SharpenStatus {
// upper limit of the allowed sharpening response
double limit;
// The sharpening strength requested by the user or application.
double user_strength;
double userStrength;
};
#ifdef __cplusplus

View file

@ -75,40 +75,40 @@ public:
};
MdParser()
: reset_(true), bits_per_pixel_(0), num_lines_(0), line_length_bytes_(0)
: reset_(true), bitsPerPixel_(0), numLines_(0), lineLengthBytes_(0)
{
}
virtual ~MdParser() = default;
void Reset()
void reset()
{
reset_ = true;
}
void SetBitsPerPixel(int bpp)
void setBitsPerPixel(int bpp)
{
bits_per_pixel_ = bpp;
bitsPerPixel_ = bpp;
}
void SetNumLines(unsigned int num_lines)
void setNumLines(unsigned int numLines)
{
num_lines_ = num_lines;
numLines_ = numLines;
}
void SetLineLengthBytes(unsigned int num_bytes)
void setLineLengthBytes(unsigned int numBytes)
{
line_length_bytes_ = num_bytes;
lineLengthBytes_ = numBytes;
}
virtual Status Parse(libcamera::Span<const uint8_t> buffer,
virtual Status parse(libcamera::Span<const uint8_t> buffer,
RegisterMap &registers) = 0;
protected:
bool reset_;
int bits_per_pixel_;
unsigned int num_lines_;
unsigned int line_length_bytes_;
int bitsPerPixel_;
unsigned int numLines_;
unsigned int lineLengthBytes_;
};
/*
@ -123,7 +123,7 @@ class MdParserSmia final : public MdParser
public:
MdParserSmia(std::initializer_list<uint32_t> registerList);
MdParser::Status Parse(libcamera::Span<const uint8_t> buffer,
MdParser::Status parse(libcamera::Span<const uint8_t> buffer,
RegisterMap &registers) override;
private:
@ -133,18 +133,18 @@ private:
/*
* Note that error codes > 0 are regarded as non-fatal; codes < 0
* indicate a bad data buffer. Status codes are:
* PARSE_OK - found all registers, much happiness
* MISSING_REGS - some registers found; should this be a hard error?
* ParseOk - found all registers, much happiness
* MissingRegs - some registers found; should this be a hard error?
* The remaining codes are all hard errors.
*/
enum ParseStatus {
PARSE_OK = 0,
MISSING_REGS = 1,
NO_LINE_START = -1,
ILLEGAL_TAG = -2,
BAD_DUMMY = -3,
BAD_LINE_END = -4,
BAD_PADDING = -5
ParseOk = 0,
MissingRegs = 1,
NoLineStart = -1,
IllegalTag = -2,
BadDummy = -3,
BadLineEnd = -4,
BadPadding = -5
};
ParseStatus findRegs(libcamera::Span<const uint8_t> buffer);

View file

@ -20,12 +20,12 @@ using namespace libcamera;
* sensors, I think.
*/
constexpr unsigned int LINE_START = 0x0a;
constexpr unsigned int LINE_END_TAG = 0x07;
constexpr unsigned int REG_HI_BITS = 0xaa;
constexpr unsigned int REG_LOW_BITS = 0xa5;
constexpr unsigned int REG_VALUE = 0x5a;
constexpr unsigned int REG_SKIP = 0x55;
constexpr unsigned int LineStart = 0x0a;
constexpr unsigned int LineEndTag = 0x07;
constexpr unsigned int RegHiBits = 0xaa;
constexpr unsigned int RegLowBits = 0xa5;
constexpr unsigned int RegValue = 0x5a;
constexpr unsigned int RegSkip = 0x55;
MdParserSmia::MdParserSmia(std::initializer_list<uint32_t> registerList)
{
@ -33,7 +33,7 @@ MdParserSmia::MdParserSmia(std::initializer_list<uint32_t> registerList)
offsets_[r] = {};
}
MdParser::Status MdParserSmia::Parse(libcamera::Span<const uint8_t> buffer,
MdParser::Status MdParserSmia::parse(libcamera::Span<const uint8_t> buffer,
RegisterMap &registers)
{
if (reset_) {
@ -41,7 +41,7 @@ MdParser::Status MdParserSmia::Parse(libcamera::Span<const uint8_t> buffer,
* Search again through the metadata for all the registers
* requested.
*/
ASSERT(bits_per_pixel_);
ASSERT(bitsPerPixel_);
for (const auto &kv : offsets_)
offsets_[kv.first] = {};
@ -53,7 +53,7 @@ MdParser::Status MdParserSmia::Parse(libcamera::Span<const uint8_t> buffer,
*
* In either case, we retry parsing on the next frame.
*/
if (ret != PARSE_OK)
if (ret != ParseOk)
return ERROR;
reset_ = false;
@ -76,74 +76,74 @@ MdParserSmia::ParseStatus MdParserSmia::findRegs(libcamera::Span<const uint8_t>
{
ASSERT(offsets_.size());
if (buffer[0] != LINE_START)
return NO_LINE_START;
if (buffer[0] != LineStart)
return NoLineStart;
unsigned int current_offset = 1; /* after the LINE_START */
unsigned int current_line_start = 0, current_line = 0;
unsigned int reg_num = 0, regs_done = 0;
unsigned int currentOffset = 1; /* after the LineStart */
unsigned int currentLineStart = 0, currentLine = 0;
unsigned int regNum = 0, regsDone = 0;
while (1) {
int tag = buffer[current_offset++];
int tag = buffer[currentOffset++];
if ((bits_per_pixel_ == 10 &&
(current_offset + 1 - current_line_start) % 5 == 0) ||
(bits_per_pixel_ == 12 &&
(current_offset + 1 - current_line_start) % 3 == 0)) {
if (buffer[current_offset++] != REG_SKIP)
return BAD_DUMMY;
if ((bitsPerPixel_ == 10 &&
(currentOffset + 1 - currentLineStart) % 5 == 0) ||
(bitsPerPixel_ == 12 &&
(currentOffset + 1 - currentLineStart) % 3 == 0)) {
if (buffer[currentOffset++] != RegSkip)
return BadDummy;
}
int data_byte = buffer[current_offset++];
int dataByte = buffer[currentOffset++];
if (tag == LINE_END_TAG) {
if (data_byte != LINE_END_TAG)
return BAD_LINE_END;
if (tag == LineEndTag) {
if (dataByte != LineEndTag)
return BadLineEnd;
if (num_lines_ && ++current_line == num_lines_)
return MISSING_REGS;
if (numLines_ && ++currentLine == numLines_)
return MissingRegs;
if (line_length_bytes_) {
current_offset = current_line_start + line_length_bytes_;
if (lineLengthBytes_) {
currentOffset = currentLineStart + lineLengthBytes_;
/* Require whole line to be in the buffer (if buffer size set). */
if (buffer.size() &&
current_offset + line_length_bytes_ > buffer.size())
return MISSING_REGS;
currentOffset + lineLengthBytes_ > buffer.size())
return MissingRegs;
if (buffer[current_offset] != LINE_START)
return NO_LINE_START;
if (buffer[currentOffset] != LineStart)
return NoLineStart;
} else {
/* allow a zero line length to mean "hunt for the next line" */
while (current_offset < buffer.size() &&
buffer[current_offset] != LINE_START)
current_offset++;
while (currentOffset < buffer.size() &&
buffer[currentOffset] != LineStart)
currentOffset++;
if (current_offset == buffer.size())
return NO_LINE_START;
if (currentOffset == buffer.size())
return NoLineStart;
}
/* inc current_offset to after LINE_START */
current_line_start = current_offset++;
/* inc currentOffset to after LineStart */
currentLineStart = currentOffset++;
} else {
if (tag == REG_HI_BITS)
reg_num = (reg_num & 0xff) | (data_byte << 8);
else if (tag == REG_LOW_BITS)
reg_num = (reg_num & 0xff00) | data_byte;
else if (tag == REG_SKIP)
reg_num++;
else if (tag == REG_VALUE) {
auto reg = offsets_.find(reg_num);
if (tag == RegHiBits)
regNum = (regNum & 0xff) | (dataByte << 8);
else if (tag == RegLowBits)
regNum = (regNum & 0xff00) | dataByte;
else if (tag == RegSkip)
regNum++;
else if (tag == RegValue) {
auto reg = offsets_.find(regNum);
if (reg != offsets_.end()) {
offsets_[reg_num] = current_offset - 1;
offsets_[regNum] = currentOffset - 1;
if (++regs_done == offsets_.size())
return PARSE_OK;
if (++regsDone == offsets_.size())
return ParseOk;
}
reg_num++;
regNum++;
} else
return ILLEGAL_TAG;
return IllegalTag;
}
}
}

View file

@ -208,7 +208,7 @@ int IPARPi::init(const IPASettings &settings, IPAInitResult *result)
* that the kernel driver doesn't. We only do this the first time; we don't need
* to re-parse the metadata after a simple mode-switch for no reason.
*/
helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::Create(settings.sensorModel));
helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::create(settings.sensorModel));
if (!helper_) {
LOG(IPARPI, Error) << "Could not create camera helper for "
<< settings.sensorModel;
@ -220,8 +220,8 @@ int IPARPi::init(const IPASettings &settings, IPAInitResult *result)
* to setup the staggered writer class.
*/
int gainDelay, exposureDelay, vblankDelay, sensorMetadata;
helper_->GetDelays(exposureDelay, gainDelay, vblankDelay);
sensorMetadata = helper_->SensorEmbeddedDataPresent();
helper_->getDelays(exposureDelay, gainDelay, vblankDelay);
sensorMetadata = helper_->sensorEmbeddedDataPresent();
result->sensorConfig.gainDelay = gainDelay;
result->sensorConfig.exposureDelay = exposureDelay;
@ -229,8 +229,8 @@ int IPARPi::init(const IPASettings &settings, IPAInitResult *result)
result->sensorConfig.sensorMetadata = sensorMetadata;
/* Load the tuning file for this sensor. */
controller_.Read(settings.configurationFile.c_str());
controller_.Initialise();
controller_.read(settings.configurationFile.c_str());
controller_.initialise();
/* Return the controls handled by the IPA */
ControlInfoMap::Map ctrlMap = ipaControls;
@ -249,15 +249,15 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
queueRequest(controls);
}
controller_.SwitchMode(mode_, &metadata);
controller_.switchMode(mode_, &metadata);
/* SwitchMode may supply updated exposure/gain values to use. */
AgcStatus agcStatus;
agcStatus.shutter_time = 0.0s;
agcStatus.analogue_gain = 0.0;
agcStatus.shutterTime = 0.0s;
agcStatus.analogueGain = 0.0;
metadata.Get("agc.status", agcStatus);
if (agcStatus.shutter_time && agcStatus.analogue_gain) {
metadata.get("agc.status", agcStatus);
if (agcStatus.shutterTime && agcStatus.analogueGain) {
ControlList ctrls(sensorCtrls_);
applyAGC(&agcStatus, ctrls);
startConfig->controls = std::move(ctrls);
@ -271,8 +271,8 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
frameCount_ = 0;
checkCount_ = 0;
if (firstStart_) {
dropFrameCount_ = helper_->HideFramesStartup();
mistrustCount_ = helper_->MistrustFramesStartup();
dropFrameCount_ = helper_->hideFramesStartup();
mistrustCount_ = helper_->mistrustFramesStartup();
/*
* Query the AGC/AWB for how many frames they may take to
@ -283,18 +283,18 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
*/
unsigned int agcConvergenceFrames = 0;
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
controller_.getAlgorithm("agc"));
if (agc) {
agcConvergenceFrames = agc->GetConvergenceFrames();
agcConvergenceFrames = agc->getConvergenceFrames();
if (agcConvergenceFrames)
agcConvergenceFrames += mistrustCount_;
}
unsigned int awbConvergenceFrames = 0;
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
controller_.GetAlgorithm("awb"));
controller_.getAlgorithm("awb"));
if (awb) {
awbConvergenceFrames = awb->GetConvergenceFrames();
awbConvergenceFrames = awb->getConvergenceFrames();
if (awbConvergenceFrames)
awbConvergenceFrames += mistrustCount_;
}
@ -302,12 +302,12 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
dropFrameCount_ = std::max({ dropFrameCount_, agcConvergenceFrames, awbConvergenceFrames });
LOG(IPARPI, Debug) << "Drop " << dropFrameCount_ << " frames on startup";
} else {
dropFrameCount_ = helper_->HideFramesModeSwitch();
mistrustCount_ = helper_->MistrustFramesModeSwitch();
dropFrameCount_ = helper_->hideFramesModeSwitch();
mistrustCount_ = helper_->mistrustFramesModeSwitch();
}
startConfig->dropFrameCount = dropFrameCount_;
const Duration maxSensorFrameDuration = mode_.max_frame_length * mode_.line_length;
const Duration maxSensorFrameDuration = mode_.maxFrameLength * mode_.lineLength;
startConfig->maxSensorFrameLengthMs = maxSensorFrameDuration.get<std::milli>();
firstStart_ = false;
@ -319,17 +319,17 @@ void IPARPi::setMode(const IPACameraSensorInfo &sensorInfo)
mode_.bitdepth = sensorInfo.bitsPerPixel;
mode_.width = sensorInfo.outputSize.width;
mode_.height = sensorInfo.outputSize.height;
mode_.sensor_width = sensorInfo.activeAreaSize.width;
mode_.sensor_height = sensorInfo.activeAreaSize.height;
mode_.crop_x = sensorInfo.analogCrop.x;
mode_.crop_y = sensorInfo.analogCrop.y;
mode_.sensorWidth = sensorInfo.activeAreaSize.width;
mode_.sensorHeight = sensorInfo.activeAreaSize.height;
mode_.cropX = sensorInfo.analogCrop.x;
mode_.cropY = sensorInfo.analogCrop.y;
/*
* Calculate scaling parameters. The scale_[xy] factors are determined
* by the ratio between the crop rectangle size and the output size.
*/
mode_.scale_x = sensorInfo.analogCrop.width / sensorInfo.outputSize.width;
mode_.scale_y = sensorInfo.analogCrop.height / sensorInfo.outputSize.height;
mode_.scaleX = sensorInfo.analogCrop.width / sensorInfo.outputSize.width;
mode_.scaleY = sensorInfo.analogCrop.height / sensorInfo.outputSize.height;
/*
* We're not told by the pipeline handler how scaling is split between
@ -339,30 +339,30 @@ void IPARPi::setMode(const IPACameraSensorInfo &sensorInfo)
*
* \todo Get the pipeline handle to provide the full data
*/
mode_.bin_x = std::min(2, static_cast<int>(mode_.scale_x));
mode_.bin_y = std::min(2, static_cast<int>(mode_.scale_y));
mode_.binX = std::min(2, static_cast<int>(mode_.scaleX));
mode_.binY = std::min(2, static_cast<int>(mode_.scaleY));
/* The noise factor is the square root of the total binning factor. */
mode_.noise_factor = sqrt(mode_.bin_x * mode_.bin_y);
mode_.noiseFactor = sqrt(mode_.binX * mode_.binY);
/*
* Calculate the line length as the ratio between the line length in
* pixels and the pixel rate.
*/
mode_.line_length = sensorInfo.lineLength * (1.0s / sensorInfo.pixelRate);
mode_.lineLength = sensorInfo.lineLength * (1.0s / sensorInfo.pixelRate);
/*
* Set the frame length limits for the mode to ensure exposure and
* framerate calculations are clipped appropriately.
*/
mode_.min_frame_length = sensorInfo.minFrameLength;
mode_.max_frame_length = sensorInfo.maxFrameLength;
mode_.minFrameLength = sensorInfo.minFrameLength;
mode_.maxFrameLength = sensorInfo.maxFrameLength;
/*
* Some sensors may have different sensitivities in different modes;
* the CamHelper will know the correct value.
*/
mode_.sensitivity = helper_->GetModeSensitivity(mode_);
mode_.sensitivity = helper_->getModeSensitivity(mode_);
}
int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
@ -421,7 +421,7 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
}
/* Pass the camera mode to the CamHelper to setup algorithms. */
helper_->SetCameraMode(mode_);
helper_->setCameraMode(mode_);
/*
* Initialise this ControlList correctly, even if empty, in case the IPA is
@ -438,8 +438,8 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
/* Supply initial values for gain and exposure. */
AgcStatus agcStatus;
agcStatus.shutter_time = defaultExposureTime;
agcStatus.analogue_gain = defaultAnalogueGain;
agcStatus.shutterTime = defaultExposureTime;
agcStatus.analogueGain = defaultAnalogueGain;
applyAGC(&agcStatus, ctrls);
}
@ -451,25 +451,25 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
* based on the current sensor mode.
*/
ControlInfoMap::Map ctrlMap = ipaControls;
const Duration minSensorFrameDuration = mode_.min_frame_length * mode_.line_length;
const Duration maxSensorFrameDuration = mode_.max_frame_length * mode_.line_length;
const Duration minSensorFrameDuration = mode_.minFrameLength * mode_.lineLength;
const Duration maxSensorFrameDuration = mode_.maxFrameLength * mode_.lineLength;
ctrlMap[&controls::FrameDurationLimits] =
ControlInfo(static_cast<int64_t>(minSensorFrameDuration.get<std::micro>()),
static_cast<int64_t>(maxSensorFrameDuration.get<std::micro>()));
ctrlMap[&controls::AnalogueGain] =
ControlInfo(1.0f, static_cast<float>(helper_->Gain(maxSensorGainCode_)));
ControlInfo(1.0f, static_cast<float>(helper_->gain(maxSensorGainCode_)));
/*
* Calculate the max exposure limit from the frame duration limit as V4L2
* will limit the maximum control value based on the current VBLANK value.
*/
Duration maxShutter = Duration::max();
helper_->GetVBlanking(maxShutter, minSensorFrameDuration, maxSensorFrameDuration);
helper_->getVBlanking(maxShutter, minSensorFrameDuration, maxSensorFrameDuration);
const uint32_t exposureMin = sensorCtrls_.at(V4L2_CID_EXPOSURE).min().get<int32_t>();
ctrlMap[&controls::ExposureTime] =
ControlInfo(static_cast<int32_t>(helper_->Exposure(exposureMin).get<std::micro>()),
ControlInfo(static_cast<int32_t>(helper_->exposure(exposureMin).get<std::micro>()),
static_cast<int32_t>(maxShutter.get<std::micro>()));
result->controlInfo = ControlInfoMap(std::move(ctrlMap), controls::controls);
@ -536,54 +536,54 @@ void IPARPi::reportMetadata()
* processed can be extracted and placed into the libcamera metadata
* buffer, where an application could query it.
*/
DeviceStatus *deviceStatus = rpiMetadata_.GetLocked<DeviceStatus>("device.status");
DeviceStatus *deviceStatus = rpiMetadata_.getLocked<DeviceStatus>("device.status");
if (deviceStatus) {
libcameraMetadata_.set(controls::ExposureTime,
deviceStatus->shutter_speed.get<std::micro>());
libcameraMetadata_.set(controls::AnalogueGain, deviceStatus->analogue_gain);
deviceStatus->shutterSpeed.get<std::micro>());
libcameraMetadata_.set(controls::AnalogueGain, deviceStatus->analogueGain);
libcameraMetadata_.set(controls::FrameDuration,
helper_->Exposure(deviceStatus->frame_length).get<std::micro>());
if (deviceStatus->sensor_temperature)
libcameraMetadata_.set(controls::SensorTemperature, *deviceStatus->sensor_temperature);
helper_->exposure(deviceStatus->frameLength).get<std::micro>());
if (deviceStatus->sensorTemperature)
libcameraMetadata_.set(controls::SensorTemperature, *deviceStatus->sensorTemperature);
}
AgcStatus *agcStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
AgcStatus *agcStatus = rpiMetadata_.getLocked<AgcStatus>("agc.status");
if (agcStatus) {
libcameraMetadata_.set(controls::AeLocked, agcStatus->locked);
libcameraMetadata_.set(controls::DigitalGain, agcStatus->digital_gain);
libcameraMetadata_.set(controls::DigitalGain, agcStatus->digitalGain);
}
LuxStatus *luxStatus = rpiMetadata_.GetLocked<LuxStatus>("lux.status");
LuxStatus *luxStatus = rpiMetadata_.getLocked<LuxStatus>("lux.status");
if (luxStatus)
libcameraMetadata_.set(controls::Lux, luxStatus->lux);
AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
AwbStatus *awbStatus = rpiMetadata_.getLocked<AwbStatus>("awb.status");
if (awbStatus) {
libcameraMetadata_.set(controls::ColourGains, { static_cast<float>(awbStatus->gain_r),
static_cast<float>(awbStatus->gain_b) });
libcameraMetadata_.set(controls::ColourTemperature, awbStatus->temperature_K);
libcameraMetadata_.set(controls::ColourGains, { static_cast<float>(awbStatus->gainR),
static_cast<float>(awbStatus->gainB) });
libcameraMetadata_.set(controls::ColourTemperature, awbStatus->temperatureK);
}
BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
BlackLevelStatus *blackLevelStatus = rpiMetadata_.getLocked<BlackLevelStatus>("black_level.status");
if (blackLevelStatus)
libcameraMetadata_.set(controls::SensorBlackLevels,
{ static_cast<int32_t>(blackLevelStatus->black_level_r),
static_cast<int32_t>(blackLevelStatus->black_level_g),
static_cast<int32_t>(blackLevelStatus->black_level_g),
static_cast<int32_t>(blackLevelStatus->black_level_b) });
{ static_cast<int32_t>(blackLevelStatus->blackLevelR),
static_cast<int32_t>(blackLevelStatus->blackLevelG),
static_cast<int32_t>(blackLevelStatus->blackLevelG),
static_cast<int32_t>(blackLevelStatus->blackLevelB) });
FocusStatus *focusStatus = rpiMetadata_.GetLocked<FocusStatus>("focus.status");
FocusStatus *focusStatus = rpiMetadata_.getLocked<FocusStatus>("focus.status");
if (focusStatus && focusStatus->num == 12) {
/*
* We get a 4x3 grid of regions by default. Calculate the average
* FoM over the central two positions to give an overall scene FoM.
* This can change later if it is not deemed suitable.
*/
int32_t focusFoM = (focusStatus->focus_measures[5] + focusStatus->focus_measures[6]) / 2;
int32_t focusFoM = (focusStatus->focusMeasures[5] + focusStatus->focusMeasures[6]) / 2;
libcameraMetadata_.set(controls::FocusFoM, focusFoM);
}
CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
CcmStatus *ccmStatus = rpiMetadata_.getLocked<CcmStatus>("ccm.status");
if (ccmStatus) {
float m[9];
for (unsigned int i = 0; i < 9; i++)
@ -695,7 +695,7 @@ void IPARPi::queueRequest(const ControlList &controls)
switch (ctrl.first) {
case controls::AE_ENABLE: {
RPiController::Algorithm *agc = controller_.GetAlgorithm("agc");
RPiController::Algorithm *agc = controller_.getAlgorithm("agc");
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set AE_ENABLE - no AGC algorithm";
@ -703,9 +703,9 @@ void IPARPi::queueRequest(const ControlList &controls)
}
if (ctrl.second.get<bool>() == false)
agc->Pause();
agc->pause();
else
agc->Resume();
agc->resume();
libcameraMetadata_.set(controls::AeEnable, ctrl.second.get<bool>());
break;
@ -713,7 +713,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::EXPOSURE_TIME: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set EXPOSURE_TIME - no AGC algorithm";
@ -721,7 +721,7 @@ void IPARPi::queueRequest(const ControlList &controls)
}
/* The control provides units of microseconds. */
agc->SetFixedShutter(ctrl.second.get<int32_t>() * 1.0us);
agc->setFixedShutter(ctrl.second.get<int32_t>() * 1.0us);
libcameraMetadata_.set(controls::ExposureTime, ctrl.second.get<int32_t>());
break;
@ -729,14 +729,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::ANALOGUE_GAIN: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set ANALOGUE_GAIN - no AGC algorithm";
break;
}
agc->SetFixedAnalogueGain(ctrl.second.get<float>());
agc->setFixedAnalogueGain(ctrl.second.get<float>());
libcameraMetadata_.set(controls::AnalogueGain,
ctrl.second.get<float>());
@ -745,7 +745,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_METERING_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set AE_METERING_MODE - no AGC algorithm";
@ -754,7 +754,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
if (MeteringModeTable.count(idx)) {
agc->SetMeteringMode(MeteringModeTable.at(idx));
agc->setMeteringMode(MeteringModeTable.at(idx));
libcameraMetadata_.set(controls::AeMeteringMode, idx);
} else {
LOG(IPARPI, Error) << "Metering mode " << idx
@ -765,7 +765,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_CONSTRAINT_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set AE_CONSTRAINT_MODE - no AGC algorithm";
@ -774,7 +774,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
if (ConstraintModeTable.count(idx)) {
agc->SetConstraintMode(ConstraintModeTable.at(idx));
agc->setConstraintMode(ConstraintModeTable.at(idx));
libcameraMetadata_.set(controls::AeConstraintMode, idx);
} else {
LOG(IPARPI, Error) << "Constraint mode " << idx
@ -785,7 +785,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_EXPOSURE_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set AE_EXPOSURE_MODE - no AGC algorithm";
@ -794,7 +794,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
if (ExposureModeTable.count(idx)) {
agc->SetExposureMode(ExposureModeTable.at(idx));
agc->setExposureMode(ExposureModeTable.at(idx));
libcameraMetadata_.set(controls::AeExposureMode, idx);
} else {
LOG(IPARPI, Error) << "Exposure mode " << idx
@ -805,7 +805,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::EXPOSURE_VALUE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set EXPOSURE_VALUE - no AGC algorithm";
@ -817,14 +817,14 @@ void IPARPi::queueRequest(const ControlList &controls)
* So convert to 2^EV
*/
double ev = pow(2.0, ctrl.second.get<float>());
agc->SetEv(ev);
agc->setEv(ev);
libcameraMetadata_.set(controls::ExposureValue,
ctrl.second.get<float>());
break;
}
case controls::AWB_ENABLE: {
RPiController::Algorithm *awb = controller_.GetAlgorithm("awb");
RPiController::Algorithm *awb = controller_.getAlgorithm("awb");
if (!awb) {
LOG(IPARPI, Warning)
<< "Could not set AWB_ENABLE - no AWB algorithm";
@ -832,9 +832,9 @@ void IPARPi::queueRequest(const ControlList &controls)
}
if (ctrl.second.get<bool>() == false)
awb->Pause();
awb->pause();
else
awb->Resume();
awb->resume();
libcameraMetadata_.set(controls::AwbEnable,
ctrl.second.get<bool>());
@ -843,7 +843,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AWB_MODE: {
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
controller_.GetAlgorithm("awb"));
controller_.getAlgorithm("awb"));
if (!awb) {
LOG(IPARPI, Warning)
<< "Could not set AWB_MODE - no AWB algorithm";
@ -852,7 +852,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
if (AwbModeTable.count(idx)) {
awb->SetMode(AwbModeTable.at(idx));
awb->setMode(AwbModeTable.at(idx));
libcameraMetadata_.set(controls::AwbMode, idx);
} else {
LOG(IPARPI, Error) << "AWB mode " << idx
@ -864,14 +864,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::COLOUR_GAINS: {
auto gains = ctrl.second.get<Span<const float>>();
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
controller_.GetAlgorithm("awb"));
controller_.getAlgorithm("awb"));
if (!awb) {
LOG(IPARPI, Warning)
<< "Could not set COLOUR_GAINS - no AWB algorithm";
break;
}
awb->SetManualGains(gains[0], gains[1]);
awb->setManualGains(gains[0], gains[1]);
if (gains[0] != 0.0f && gains[1] != 0.0f)
/* A gain of 0.0f will switch back to auto mode. */
libcameraMetadata_.set(controls::ColourGains,
@ -881,14 +881,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::BRIGHTNESS: {
RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
controller_.GetAlgorithm("contrast"));
controller_.getAlgorithm("contrast"));
if (!contrast) {
LOG(IPARPI, Warning)
<< "Could not set BRIGHTNESS - no contrast algorithm";
break;
}
contrast->SetBrightness(ctrl.second.get<float>() * 65536);
contrast->setBrightness(ctrl.second.get<float>() * 65536);
libcameraMetadata_.set(controls::Brightness,
ctrl.second.get<float>());
break;
@ -896,14 +896,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::CONTRAST: {
RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
controller_.GetAlgorithm("contrast"));
controller_.getAlgorithm("contrast"));
if (!contrast) {
LOG(IPARPI, Warning)
<< "Could not set CONTRAST - no contrast algorithm";
break;
}
contrast->SetContrast(ctrl.second.get<float>());
contrast->setContrast(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Contrast,
ctrl.second.get<float>());
break;
@ -911,14 +911,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::SATURATION: {
RPiController::CcmAlgorithm *ccm = dynamic_cast<RPiController::CcmAlgorithm *>(
controller_.GetAlgorithm("ccm"));
controller_.getAlgorithm("ccm"));
if (!ccm) {
LOG(IPARPI, Warning)
<< "Could not set SATURATION - no ccm algorithm";
break;
}
ccm->SetSaturation(ctrl.second.get<float>());
ccm->setSaturation(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Saturation,
ctrl.second.get<float>());
break;
@ -926,14 +926,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::SHARPNESS: {
RPiController::SharpenAlgorithm *sharpen = dynamic_cast<RPiController::SharpenAlgorithm *>(
controller_.GetAlgorithm("sharpen"));
controller_.getAlgorithm("sharpen"));
if (!sharpen) {
LOG(IPARPI, Warning)
<< "Could not set SHARPNESS - no sharpen algorithm";
break;
}
sharpen->SetStrength(ctrl.second.get<float>());
sharpen->setStrength(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Sharpness,
ctrl.second.get<float>());
break;
@ -952,7 +952,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::NOISE_REDUCTION_MODE: {
RPiController::DenoiseAlgorithm *sdn = dynamic_cast<RPiController::DenoiseAlgorithm *>(
controller_.GetAlgorithm("SDN"));
controller_.getAlgorithm("SDN"));
if (!sdn) {
LOG(IPARPI, Warning)
<< "Could not set NOISE_REDUCTION_MODE - no SDN algorithm";
@ -962,7 +962,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
auto mode = DenoiseModeTable.find(idx);
if (mode != DenoiseModeTable.end()) {
sdn->SetMode(mode->second);
sdn->setMode(mode->second);
/*
* \todo If the colour denoise is not going to run due to an
@ -1014,7 +1014,7 @@ void IPARPi::prepareISP(const ISPConfig &data)
* This may overwrite the DeviceStatus using values from the sensor
* metadata, and may also do additional custom processing.
*/
helper_->Prepare(embeddedBuffer, rpiMetadata_);
helper_->prepare(embeddedBuffer, rpiMetadata_);
/* Done with embedded data now, return to pipeline handler asap. */
if (data.embeddedBufferPresent)
@ -1030,7 +1030,7 @@ void IPARPi::prepareISP(const ISPConfig &data)
* current frame, or any other bits of metadata that were added
* in helper_->Prepare().
*/
rpiMetadata_.Merge(lastMetadata);
rpiMetadata_.merge(lastMetadata);
processPending_ = false;
return;
}
@ -1040,48 +1040,48 @@ void IPARPi::prepareISP(const ISPConfig &data)
ControlList ctrls(ispCtrls_);
controller_.Prepare(&rpiMetadata_);
controller_.prepare(&rpiMetadata_);
/* Lock the metadata buffer to avoid constant locks/unlocks. */
std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
AwbStatus *awbStatus = rpiMetadata_.getLocked<AwbStatus>("awb.status");
if (awbStatus)
applyAWB(awbStatus, ctrls);
CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
CcmStatus *ccmStatus = rpiMetadata_.getLocked<CcmStatus>("ccm.status");
if (ccmStatus)
applyCCM(ccmStatus, ctrls);
AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
AgcStatus *dgStatus = rpiMetadata_.getLocked<AgcStatus>("agc.status");
if (dgStatus)
applyDG(dgStatus, ctrls);
AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
AlscStatus *lsStatus = rpiMetadata_.getLocked<AlscStatus>("alsc.status");
if (lsStatus)
applyLS(lsStatus, ctrls);
ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
ContrastStatus *contrastStatus = rpiMetadata_.getLocked<ContrastStatus>("contrast.status");
if (contrastStatus)
applyGamma(contrastStatus, ctrls);
BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
BlackLevelStatus *blackLevelStatus = rpiMetadata_.getLocked<BlackLevelStatus>("black_level.status");
if (blackLevelStatus)
applyBlackLevel(blackLevelStatus, ctrls);
GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
GeqStatus *geqStatus = rpiMetadata_.getLocked<GeqStatus>("geq.status");
if (geqStatus)
applyGEQ(geqStatus, ctrls);
DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
DenoiseStatus *denoiseStatus = rpiMetadata_.getLocked<DenoiseStatus>("denoise.status");
if (denoiseStatus)
applyDenoise(denoiseStatus, ctrls);
SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
SharpenStatus *sharpenStatus = rpiMetadata_.getLocked<SharpenStatus>("sharpen.status");
if (sharpenStatus)
applySharpen(sharpenStatus, ctrls);
DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
DpcStatus *dpcStatus = rpiMetadata_.getLocked<DpcStatus>("dpc.status");
if (dpcStatus)
applyDPC(dpcStatus, ctrls);
@ -1097,13 +1097,13 @@ void IPARPi::fillDeviceStatus(const ControlList &sensorControls)
int32_t gainCode = sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
int32_t vblank = sensorControls.get(V4L2_CID_VBLANK).get<int32_t>();
deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
deviceStatus.analogue_gain = helper_->Gain(gainCode);
deviceStatus.frame_length = mode_.height + vblank;
deviceStatus.shutterSpeed = helper_->exposure(exposureLines);
deviceStatus.analogueGain = helper_->gain(gainCode);
deviceStatus.frameLength = mode_.height + vblank;
LOG(IPARPI, Debug) << "Metadata - " << deviceStatus;
rpiMetadata_.Set("device.status", deviceStatus);
rpiMetadata_.set("device.status", deviceStatus);
}
void IPARPi::processStats(unsigned int bufferId)
@ -1117,11 +1117,11 @@ void IPARPi::processStats(unsigned int bufferId)
Span<uint8_t> mem = it->second.planes()[0];
bcm2835_isp_stats *stats = reinterpret_cast<bcm2835_isp_stats *>(mem.data());
RPiController::StatisticsPtr statistics = std::make_shared<bcm2835_isp_stats>(*stats);
helper_->Process(statistics, rpiMetadata_);
controller_.Process(statistics, &rpiMetadata_);
helper_->process(statistics, rpiMetadata_);
controller_.process(statistics, &rpiMetadata_);
struct AgcStatus agcStatus;
if (rpiMetadata_.Get("agc.status", agcStatus) == 0) {
if (rpiMetadata_.get("agc.status", agcStatus) == 0) {
ControlList ctrls(sensorCtrls_);
applyAGC(&agcStatus, ctrls);
@ -1131,19 +1131,19 @@ void IPARPi::processStats(unsigned int bufferId)
void IPARPi::applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls)
{
LOG(IPARPI, Debug) << "Applying WB R: " << awbStatus->gain_r << " B: "
<< awbStatus->gain_b;
LOG(IPARPI, Debug) << "Applying WB R: " << awbStatus->gainR << " B: "
<< awbStatus->gainB;
ctrls.set(V4L2_CID_RED_BALANCE,
static_cast<int32_t>(awbStatus->gain_r * 1000));
static_cast<int32_t>(awbStatus->gainR * 1000));
ctrls.set(V4L2_CID_BLUE_BALANCE,
static_cast<int32_t>(awbStatus->gain_b * 1000));
static_cast<int32_t>(awbStatus->gainB * 1000));
}
void IPARPi::applyFrameDurations(Duration minFrameDuration, Duration maxFrameDuration)
{
const Duration minSensorFrameDuration = mode_.min_frame_length * mode_.line_length;
const Duration maxSensorFrameDuration = mode_.max_frame_length * mode_.line_length;
const Duration minSensorFrameDuration = mode_.minFrameLength * mode_.lineLength;
const Duration maxSensorFrameDuration = mode_.maxFrameLength * mode_.lineLength;
/*
* This will only be applied once AGC recalculations occur.
@ -1164,20 +1164,20 @@ void IPARPi::applyFrameDurations(Duration minFrameDuration, Duration maxFrameDur
/*
* Calculate the maximum exposure time possible for the AGC to use.
* GetVBlanking() will update maxShutter with the largest exposure
* getVBlanking() will update maxShutter with the largest exposure
* value possible.
*/
Duration maxShutter = Duration::max();
helper_->GetVBlanking(maxShutter, minFrameDuration_, maxFrameDuration_);
helper_->getVBlanking(maxShutter, minFrameDuration_, maxFrameDuration_);
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
agc->SetMaxShutter(maxShutter);
controller_.getAlgorithm("agc"));
agc->setMaxShutter(maxShutter);
}
void IPARPi::applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls)
{
int32_t gainCode = helper_->GainCode(agcStatus->analogue_gain);
int32_t gainCode = helper_->gainCode(agcStatus->analogueGain);
/*
* Ensure anything larger than the max gain code will not be passed to
@ -1186,15 +1186,15 @@ void IPARPi::applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls)
*/
gainCode = std::min<int32_t>(gainCode, maxSensorGainCode_);
/* GetVBlanking might clip exposure time to the fps limits. */
Duration exposure = agcStatus->shutter_time;
int32_t vblanking = helper_->GetVBlanking(exposure, minFrameDuration_, maxFrameDuration_);
int32_t exposureLines = helper_->ExposureLines(exposure);
/* getVBlanking might clip exposure time to the fps limits. */
Duration exposure = agcStatus->shutterTime;
int32_t vblanking = helper_->getVBlanking(exposure, minFrameDuration_, maxFrameDuration_);
int32_t exposureLines = helper_->exposureLines(exposure);
LOG(IPARPI, Debug) << "Applying AGC Exposure: " << exposure
<< " (Shutter lines: " << exposureLines << ", AGC requested "
<< agcStatus->shutter_time << ") Gain: "
<< agcStatus->analogue_gain << " (Gain Code: "
<< agcStatus->shutterTime << ") Gain: "
<< agcStatus->analogueGain << " (Gain Code: "
<< gainCode << ")";
/*
@ -1210,7 +1210,7 @@ void IPARPi::applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls)
void IPARPi::applyDG(const struct AgcStatus *dgStatus, ControlList &ctrls)
{
ctrls.set(V4L2_CID_DIGITAL_GAIN,
static_cast<int32_t>(dgStatus->digital_gain * 1000));
static_cast<int32_t>(dgStatus->digitalGain * 1000));
}
void IPARPi::applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls)
@ -1250,9 +1250,9 @@ void IPARPi::applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, Co
bcm2835_isp_black_level blackLevel;
blackLevel.enabled = 1;
blackLevel.black_level_r = blackLevelStatus->black_level_r;
blackLevel.black_level_g = blackLevelStatus->black_level_g;
blackLevel.black_level_b = blackLevelStatus->black_level_b;
blackLevel.black_level_r = blackLevelStatus->blackLevelR;
blackLevel.black_level_g = blackLevelStatus->blackLevelG;
blackLevel.black_level_b = blackLevelStatus->blackLevelB;
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&blackLevel),
sizeof(blackLevel) });
@ -1281,8 +1281,8 @@ void IPARPi::applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList
DenoiseMode mode = static_cast<DenoiseMode>(denoiseStatus->mode);
denoise.enabled = mode != DenoiseMode::Off;
denoise.constant = denoiseStatus->noise_constant;
denoise.slope.num = 1000 * denoiseStatus->noise_slope;
denoise.constant = denoiseStatus->noiseConstant;
denoise.slope.num = 1000 * denoiseStatus->noiseSlope;
denoise.slope.den = 1000;
denoise.strength.num = 1000 * denoiseStatus->strength;
denoise.strength.den = 1000;