libcamera: controls: Use std::optional to handle invalid control values
Previously, ControlList::get&lt;T&gt;() returned a default-constructed object to indicate that a ControlList does not contain the requested Control. This has two disadvantages: 1) it requires types to be default constructible, and 2) it cannot differentiate between a missing control and a control whose value happens to equal the default-constructed one. std::optional&lt;T&gt; additionally records whether the value is valid, and is therefore more expressive than a default-constructed object.

Signed-off-by: Christian Rauch &lt;Rauch.Christian@gmx.de&gt;
Reviewed-by: Jacopo Mondi &lt;jacopo@jmondi.org&gt;
Reviewed-by: Laurent Pinchart &lt;laurent.pinchart@ideasonboard.com&gt;
Signed-off-by: Laurent Pinchart &lt;laurent.pinchart@ideasonboard.com&gt;
parent ef77e26379
commit 1c4d480185
10 changed files with 44 additions and 44 deletions
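To see the ambiguity described above, consider a control whose value is legitimately zero. The following is a minimal, self-contained C++17 sketch; the MockList class is purely illustrative and not part of libcamera. With the old convention the caller cannot tell a missing control from a stored 0, while std::optional makes the distinction explicit.

#include <cstdint>
#include <iostream>
#include <map>
#include <optional>

/* Hypothetical stand-in for a control list that stores int32_t values by id. */
class MockList
{
public:
        void set(unsigned int id, int32_t value) { values_[id] = value; }

        /* Old style: a missing entry and a stored 0 are indistinguishable. */
        int32_t getOld(unsigned int id) const
        {
                auto it = values_.find(id);
                return it != values_.end() ? it->second : int32_t{};
        }

        /* New style: std::nullopt unambiguously means "not present". */
        std::optional<int32_t> getNew(unsigned int id) const
        {
                auto it = values_.find(id);
                if (it == values_.end())
                        return std::nullopt;
                return it->second;
        }

private:
        std::map<unsigned int, int32_t> values_;
};

int main()
{
        MockList list;
        list.set(1, 0); /* 0 is a perfectly valid control value. */

        std::cout << list.getOld(1) << " " << list.getOld(2) << "\n"; /* "0 0": ambiguous */
        std::cout << list.getNew(1).has_value() << " "
                  << list.getNew(2).has_value() << "\n";              /* "1 0": explicit */
        return 0;
}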
@@ -8,6 +8,7 @@
 #pragma once
 
 #include <assert.h>
+#include <optional>
 #include <set>
 #include <stdint.h>
 #include <string>

@@ -373,11 +374,11 @@ public:
         bool contains(unsigned int id) const;
 
         template<typename T>
-        T get(const Control<T> &ctrl) const
+        std::optional<T> get(const Control<T> &ctrl) const
         {
                 const ControlValue *val = find(ctrl.id());
                 if (!val)
-                        return T{};
+                        return std::nullopt;
 
                 return val->get<T>();
         }

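The call-site changes in the rest of this commit fall into three idioms: .value_or() to substitute a default when the control may be absent, unary * to dereference after a contains() check, and keeping the returned optional and reaching into it with ->. A small illustrative sketch of the same idioms on a plain std::optional; the Size struct and lookup() helper below are hypothetical, not libcamera API.

#include <iostream>
#include <optional>

struct Size {
        unsigned int width = 0;
        unsigned int height = 0;
};

std::optional<Size> lookup(bool present)
{
        if (!present)
                return std::nullopt;
        return Size{ 640, 480 };
}

int main()
{
        /* 1) .value_or(): fall back to a default when the value may be absent. */
        Size size = lookup(false).value_or(Size{});

        /* 2) operator*: dereference directly once presence has been checked. */
        std::optional<Size> checked = lookup(true);
        if (checked)
                size = *checked;

        /* 3) operator->: keep the optional and reach into the held object. */
        const auto &cell = lookup(true);
        if (cell)
                std::cout << cell->width << "x" << cell->height << "\n";

        std::cout << size.width << "x" << size.height << "\n";
        return 0;
}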
@@ -1042,7 +1042,7 @@ int CameraCapabilities::initializeStaticMetadata()
         /* Sensor static metadata. */
         std::array<int32_t, 2> pixelArraySize;
         {
-                const Size &size = properties.get(properties::PixelArraySize);
+                const Size &size = properties.get(properties::PixelArraySize).value_or(Size{});
                 pixelArraySize[0] = size.width;
                 pixelArraySize[1] = size.height;
                 staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,

@@ -1050,10 +1050,10 @@ int CameraCapabilities::initializeStaticMetadata()
         }
 
         if (properties.contains(properties::UnitCellSize)) {
-                const Size &cellSize = properties.get<Size>(properties::UnitCellSize);
+                const auto &cellSize = properties.get<Size>(properties::UnitCellSize);
                 std::array<float, 2> physicalSize{
-                        cellSize.width * pixelArraySize[0] / 1e6f,
-                        cellSize.height * pixelArraySize[1] / 1e6f
+                        cellSize->width * pixelArraySize[0] / 1e6f,
+                        cellSize->height * pixelArraySize[1] / 1e6f
                 };
                 staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
                                           physicalSize);

@@ -1061,7 +1061,7 @@ int CameraCapabilities::initializeStaticMetadata()
 
         {
                 const Span<const Rectangle> &rects =
-                        properties.get(properties::PixelArrayActiveAreas);
+                        properties.get(properties::PixelArrayActiveAreas).value_or(Span<const Rectangle>{});
                 std::vector<int32_t> data{
                         static_cast<int32_t>(rects[0].x),
                         static_cast<int32_t>(rects[0].y),

@@ -1080,7 +1080,7 @@ int CameraCapabilities::initializeStaticMetadata()
 
         /* Report the color filter arrangement if the camera reports it. */
         if (properties.contains(properties::draft::ColorFilterArrangement)) {
-                uint8_t filterArr = properties.get(properties::draft::ColorFilterArrangement);
+                uint8_t filterArr = *properties.get(properties::draft::ColorFilterArrangement);
                 staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                                           filterArr);
         }

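A detail worth noting in the UnitCellSize hunk above: get() returns the std::optional by value, so "const auto &cellSize = ..." binds a const reference to a temporary, and ordinary C++ lifetime extension keeps that temporary alive for the enclosing scope, which is why the later cellSize->width accesses are safe. A minimal sketch of that rule, using only standard types and an illustrative make() helper:

#include <iostream>
#include <optional>
#include <string>

std::optional<std::string> make()
{
        return std::string("unit cell");
}

int main()
{
        /* The temporary optional returned by make() is lifetime-extended to
         * the scope of the const reference, so operator-> access is safe. */
        const auto &value = make();
        if (value)
                std::cout << value->size() << " characters\n";
        return 0;
}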
@@ -306,7 +306,7 @@ int CameraDevice::initialize(const CameraConfigData *cameraConfigData)
         const ControlList &properties = camera_->properties();
 
         if (properties.contains(properties::Location)) {
-                int32_t location = properties.get(properties::Location);
+                int32_t location = *properties.get(properties::Location);
                 switch (location) {
                 case properties::CameraLocationFront:
                         facing_ = CAMERA_FACING_FRONT;

@@ -356,7 +356,7 @@ int CameraDevice::initialize(const CameraConfigData *cameraConfigData)
          * metadata.
          */
         if (properties.contains(properties::Rotation)) {
-                int rotation = properties.get(properties::Rotation);
+                int rotation = *properties.get(properties::Rotation);
                 orientation_ = (360 - rotation) % 360;
                 if (cameraConfigData && cameraConfigData->rotation != -1 &&
                     orientation_ != cameraConfigData->rotation) {

@@ -1181,7 +1181,8 @@ void CameraDevice::requestComplete(Request *request)
          * as soon as possible, earlier than request completion time.
          */
         uint64_t sensorTimestamp = static_cast<uint64_t>(request->metadata()
-                                                         .get(controls::SensorTimestamp));
+                                                         .get(controls::SensorTimestamp)
+                                                         .value_or(0));
         notifyShutter(descriptor->frameNumber_, sensorTimestamp);
 
         LOG(HAL, Debug) << "Request " << request->cookie() << " completed with "

@@ -1560,29 +1561,28 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
                                  rolling_shutter_skew);
 
         /* Add metadata tags reported by libcamera. */
-        const int64_t timestamp = metadata.get(controls::SensorTimestamp);
+        const int64_t timestamp = metadata.get(controls::SensorTimestamp).value_or(0);
         resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, timestamp);
 
         if (metadata.contains(controls::draft::PipelineDepth)) {
-                uint8_t pipeline_depth =
-                        metadata.get<int32_t>(controls::draft::PipelineDepth);
+                uint8_t pipeline_depth = *metadata.get<int32_t>(controls::draft::PipelineDepth);
                 resultMetadata->addEntry(ANDROID_REQUEST_PIPELINE_DEPTH,
                                          pipeline_depth);
         }
 
         if (metadata.contains(controls::ExposureTime)) {
-                int64_t exposure = metadata.get(controls::ExposureTime) * 1000ULL;
+                int64_t exposure = *metadata.get(controls::ExposureTime) * 1000ULL;
                 resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME, exposure);
         }
 
         if (metadata.contains(controls::FrameDuration)) {
-                int64_t duration = metadata.get(controls::FrameDuration) * 1000;
+                int64_t duration = *metadata.get(controls::FrameDuration) * 1000;
                 resultMetadata->addEntry(ANDROID_SENSOR_FRAME_DURATION,
                                          duration);
         }
 
         if (metadata.contains(controls::ScalerCrop)) {
-                Rectangle crop = metadata.get(controls::ScalerCrop);
+                Rectangle crop = *metadata.get(controls::ScalerCrop);
                 int32_t cropRect[] = {
                         crop.x, crop.y, static_cast<int32_t>(crop.width),
                         static_cast<int32_t>(crop.height),

@@ -1591,8 +1591,7 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
         }
 
         if (metadata.contains(controls::draft::TestPatternMode)) {
-                const int32_t testPatternMode =
-                        metadata.get(controls::draft::TestPatternMode);
+                const int32_t testPatternMode = *metadata.get(controls::draft::TestPatternMode);
                 resultMetadata->addEntry(ANDROID_SENSOR_TEST_PATTERN_MODE,
                                          testPatternMode);
         }

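The conversions above rely on access methods with different failure behaviour: value_or() always yields a value, unary * is undefined behaviour on an empty optional (which is why it only appears after a metadata.contains() guard), and value() would instead throw std::bad_optional_access. A short standalone sketch of the difference, using plain standard types:

#include <cstdint>
#include <iostream>
#include <optional>

int main()
{
        std::optional<int64_t> empty;
        std::optional<int64_t> filled = 1000;

        /* value_or() always produces a value and never throws. */
        std::cout << empty.value_or(0) << "\n";      /* prints 0 */

        /* operator* must only be used after checking for presence;
         * dereferencing an empty optional is undefined behaviour. */
        if (filled)
                std::cout << *filled << "\n";        /* prints 1000 */

        /* value() checks for us, but reports failure with an exception. */
        try {
                std::cout << empty.value() << "\n";
        } catch (const std::bad_optional_access &e) {
                std::cout << "no value: " << e.what() << "\n";
        }
        return 0;
}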
@@ -232,7 +232,7 @@ int32_t CameraHalManager::cameraLocation(const Camera *cam)
         if (!properties.contains(properties::Location))
                 return -1;
 
-        return properties.get(properties::Location);
+        return *properties.get(properties::Location);
 }
 
 CameraDevice *CameraHalManager::cameraDeviceFromHalId(unsigned int id)

@@ -301,7 +301,7 @@ std::string CamApp::cameraName(const Camera *camera)
          * is only used if the location isn't present or is set to External.
          */
         if (props.contains(properties::Location)) {
-                switch (props.get(properties::Location)) {
+                switch (*props.get(properties::Location)) {
                 case properties::CameraLocationFront:
                         addModel = false;
                         name = "Internal front camera ";

@@ -321,7 +321,7 @@ std::string CamApp::cameraName(const Camera *camera)
                  * If the camera location is not availble use the camera model
                  * to build the camera name.
                  */
-                name = "'" + props.get(properties::Model) + "' ";
+                name = "'" + *props.get(properties::Model) + "' ";
         }
 
         name += "(" + camera->id() + ")";

@@ -234,7 +234,7 @@ GstLibcameraSrcState::requestCompleted(Request *request)
         }
 
         if (GST_ELEMENT_CLOCK(src_)) {
-                int64_t timestamp = request->metadata().get(controls::SensorTimestamp);
+                int64_t timestamp = request->metadata().get(controls::SensorTimestamp).value_or(0);
 
                 GstClockTime gst_base_time = GST_ELEMENT(src_)->base_time;
                 GstClockTime gst_now = gst_clock_get_time(GST_ELEMENT_CLOCK(src_));

@@ -993,7 +993,7 @@ void IPARPi::returnEmbeddedBuffer(unsigned int bufferId)
 
 void IPARPi::prepareISP(const ISPConfig &data)
 {
-        int64_t frameTimestamp = data.controls.get(controls::SensorTimestamp);
+        int64_t frameTimestamp = data.controls.get(controls::SensorTimestamp).value_or(0);
         RPiController::Metadata lastMetadata;
         Span<uint8_t> embeddedBuffer;
 

@@ -1145,7 +1145,7 @@ int PipelineHandlerIPU3::registerCameras()
                 /* Convert the sensor rotation to a transformation */
                 int32_t rotation = 0;
                 if (data->properties_.contains(properties::Rotation))
-                        rotation = data->properties_.get(properties::Rotation);
+                        rotation = *(data->properties_.get(properties::Rotation));
                 else
                         LOG(IPU3, Warning) << "Rotation control not exposed by "
                                            << cio2->sensor()->id()

@@ -1331,7 +1331,7 @@ void IPU3CameraData::imguOutputBufferReady(FrameBuffer *buffer)
         request->metadata().set(controls::draft::PipelineDepth, 3);
         /* \todo Actually apply the scaler crop region to the ImgU. */
         if (request->controls().contains(controls::ScalerCrop))
-                cropRegion_ = request->controls().get(controls::ScalerCrop);
+                cropRegion_ = *(request->controls().get(controls::ScalerCrop));
         request->metadata().set(controls::ScalerCrop, cropRegion_);
 
         if (frameInfos_.tryComplete(info))

@@ -1424,7 +1424,7 @@ void IPU3CameraData::statBufferReady(FrameBuffer *buffer)
                 return;
         }
 
-        ipa_->processStatsBuffer(info->id, request->metadata().get(controls::SensorTimestamp),
+        ipa_->processStatsBuffer(info->id, request->metadata().get(controls::SensorTimestamp).value_or(0),
                                  info->statBuffer->cookie(), info->effectiveSensorControls);
 }
 

@@ -1458,8 +1458,7 @@ void IPU3CameraData::frameStart(uint32_t sequence)
         if (!request->controls().contains(controls::draft::TestPatternMode))
                 return;
 
-        const int32_t testPatternMode = request->controls().get(
-                controls::draft::TestPatternMode);
+        const int32_t testPatternMode = *(request->controls().get(controls::draft::TestPatternMode));
 
         int ret = cio2_.sensor()->setTestPatternMode(
                 static_cast<controls::draft::TestPatternModeEnum>(testPatternMode));

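A note on the parenthesised form used in the IPU3 hunks, e.g. *(data->properties_.get(properties::Rotation)): the parentheses are not strictly required, because the postfix operators ->, . and the call operator bind tighter than unary *, so *data->properties_.get(...) would dereference the returned optional just the same; they are presumably kept for readability. A tiny sketch with a hypothetical Properties type:

#include <cstdint>
#include <iostream>
#include <optional>

struct Properties {
        std::optional<int32_t> get(int /* id */) const { return 180; }
};

int main()
{
        Properties props;
        const Properties *data = &props;

        /* Postfix ->, . and () bind tighter than unary *, so both forms
         * dereference the optional returned by get(); the parentheses in
         * the second form only aid readability. */
        int32_t a = *data->get(0);
        int32_t b = *(data->get(0));
        std::cout << a << " " << b << "\n";
        return 0;
}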
@@ -364,7 +364,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
          * error means the platform can never run. Let's just print a warning
          * and continue regardless; the rotation is effectively set to zero.
          */
-        int32_t rotation = data_->sensor_->properties().get(properties::Rotation);
+        int32_t rotation = data_->sensor_->properties().get(properties::Rotation).value_or(0);
         bool success;
         Transform rotationTransform = transformFromRotation(rotation, &success);
         if (!success)

@@ -1718,7 +1718,8 @@ void RPiCameraData::statsMetadataComplete(uint32_t bufferId, const ControlList &
          * V4L2_CID_NOTIFY_GAINS control (which means notifyGainsUnity_ is set).
          */
         if (notifyGainsUnity_ && controls.contains(libcamera::controls::ColourGains)) {
-                libcamera::Span<const float> colourGains = controls.get(libcamera::controls::ColourGains);
+                libcamera::Span<const float> colourGains =
+                        *controls.get(libcamera::controls::ColourGains);
                 /* The control wants linear gains in the order B, Gb, Gr, R. */
                 ControlList ctrls(sensor_->controls());
                 std::array<int32_t, 4> gains{

@@ -2053,7 +2054,7 @@ Rectangle RPiCameraData::scaleIspCrop(const Rectangle &ispCrop) const
 void RPiCameraData::applyScalerCrop(const ControlList &controls)
 {
         if (controls.contains(controls::ScalerCrop)) {
-                Rectangle nativeCrop = controls.get<Rectangle>(controls::ScalerCrop);
+                Rectangle nativeCrop = *controls.get<Rectangle>(controls::ScalerCrop);
 
                 if (!nativeCrop.width || !nativeCrop.height)
                         nativeCrop = { 0, 0, 1, 1 };

@@ -2091,7 +2092,7 @@ void RPiCameraData::fillRequestMetadata(const ControlList &bufferControls,
                                         Request *request)
 {
         request->metadata().set(controls::SensorTimestamp,
                                 bufferControls.get(controls::SensorTimestamp).value_or(0));
 
         request->metadata().set(controls::ScalerCrop, scalerCrop_);
 }

@@ -392,7 +392,7 @@ int DNGWriter::write(const char *filename, const Camera *camera,
         TIFFSetField(tif, TIFFTAG_MAKE, "libcamera");
 
         if (cameraProperties.contains(properties::Model)) {
-                std::string model = cameraProperties.get(properties::Model);
+                std::string model = *cameraProperties.get(properties::Model);
                 TIFFSetField(tif, TIFFTAG_MODEL, model.c_str());
                 /* \todo set TIFFTAG_UNIQUECAMERAMODEL. */
         }

@@ -438,16 +438,15 @@ int DNGWriter::write(const char *filename, const Camera *camera,
         const double eps = 1e-2;
 
         if (metadata.contains(controls::ColourGains)) {
-                Span<const float> const &colourGains = metadata.get(controls::ColourGains);
-                if (colourGains[0] > eps && colourGains[1] > eps) {
-                        wbGain = Matrix3d::diag(colourGains[0], 1, colourGains[1]);
-                        neutral[0] = 1.0 / colourGains[0]; /* red */
-                        neutral[2] = 1.0 / colourGains[1]; /* blue */
+                const auto &colourGains = metadata.get(controls::ColourGains);
+                if ((*colourGains)[0] > eps && (*colourGains)[1] > eps) {
+                        wbGain = Matrix3d::diag((*colourGains)[0], 1, (*colourGains)[1]);
+                        neutral[0] = 1.0 / (*colourGains)[0]; /* red */
+                        neutral[2] = 1.0 / (*colourGains)[1]; /* blue */
                 }
         }
         if (metadata.contains(controls::ColourCorrectionMatrix)) {
-                Span<const float> const &coeffs = metadata.get(controls::ColourCorrectionMatrix);
-                Matrix3d ccmSupplied(coeffs);
+                Matrix3d ccmSupplied(*metadata.get(controls::ColourCorrectionMatrix));
                 if (ccmSupplied.determinant() > eps)
                         ccm = ccmSupplied;
         }

@@ -515,7 +514,8 @@ int DNGWriter::write(const char *filename, const Camera *camera,
         uint32_t whiteLevel = (1 << info->bitsPerSample) - 1;
 
         if (metadata.contains(controls::SensorBlackLevels)) {
-                Span<const int32_t> levels = metadata.get(controls::SensorBlackLevels);
+                Span<const int32_t> levels =
+                        *metadata.get(controls::SensorBlackLevels);
 
                 /*
                  * The black levels control is specified in R, Gr, Gb, B order.

@@ -593,13 +593,13 @@ int DNGWriter::write(const char *filename, const Camera *camera,
         TIFFSetField(tif, EXIFTAG_DATETIMEDIGITIZED, strTime);
 
         if (metadata.contains(controls::AnalogueGain)) {
-                float gain = metadata.get(controls::AnalogueGain);
+                float gain = *metadata.get(controls::AnalogueGain);
                 uint16_t iso = std::min(std::max(gain * 100, 0.0f), 65535.0f);
                 TIFFSetField(tif, EXIFTAG_ISOSPEEDRATINGS, 1, &iso);
         }
 
         if (metadata.contains(controls::ExposureTime)) {
-                float exposureTime = metadata.get(controls::ExposureTime) / 1e6;
+                float exposureTime = *metadata.get(controls::ExposureTime) / 1e6;
                 TIFFSetField(tif, EXIFTAG_EXPOSURETIME, exposureTime);
         }
 

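One more parenthesisation detail, from the ColourGains hunk above: the (*colourGains)[0] form does need its parentheses, because subscripting binds tighter than unary * and std::optional provides no operator[], so the optional must be dereferenced before the held container can be indexed. An illustrative sketch using std::vector in place of the Span type:

#include <iostream>
#include <optional>
#include <vector>

int main()
{
        std::optional<std::vector<float>> colourGains =
                std::vector<float>{ 1.8f, 2.1f };

        if (colourGains) {
                /* The parentheses are required: [] binds tighter than unary *,
                 * and std::optional itself has no operator[]. */
                float red = (*colourGains)[0];
                float blue = (*colourGains)[1];
                std::cout << red << " " << blue << "\n";

                /* Alternatively, name the held container once and index it. */
                const std::vector<float> &gains = *colourGains;
                std::cout << gains[0] << " " << gains[1] << "\n";
        }
        return 0;
}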