android: camera_device: Use the new CameraMetadata helper class

Simplify the implementation of metadata handling in the CameraDevice
class by using the new CameraMetadata helper class.

Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Signed-off-by: Jacopo Mondi <jacopo@jmondi.org>
Laurent Pinchart, 2019-09-05 03:12:34 +03:00, committed by Jacopo Mondi
commit 3986009cf1 (parent 9214e2b493)
2 changed files with 223 additions and 331 deletions
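
For readers following the diff below, here is a minimal sketch of the CameraMetadata helper interface as this patch uses it. The authoritative declaration lives in the camera_metadata.h header the patch includes; the signatures below are inferred from the call sites (constructor capacities, addEntry(), isValid(), get()) and are an approximation, not the real header.

/* Approximate interface of the CameraMetadata helper, inferred from its
 * call sites in this patch; see camera_metadata.h for the real class.
 */
#include <stddef.h>
#include <stdint.h>
#include <system/camera_metadata.h>

class CameraMetadata
{
public:
	/* Pre-allocate room for entryCapacity entries and dataCapacity bytes. */
	CameraMetadata(size_t entryCapacity, size_t dataCapacity);
	/* Frees the underlying camera_metadata_t allocation. */
	~CameraMetadata();

	/* False if allocation failed or a previous addEntry() overflowed. */
	bool isValid() const;
	/* Append one tag; failures are latched and surface through isValid(). */
	bool addEntry(uint32_t tag, const void *data, size_t count);

	/* Raw packet for the camera3 API; nullptr while the pack is invalid. */
	camera_metadata_t *get();
};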

camera_device.cpp

@@ -7,10 +7,10 @@
#include "camera_device.h"
#include <system/camera_metadata.h>
#include "log.h"
#include "utils.h"
#include "camera_metadata.h"
#include "thread_rpc.h"
using namespace libcamera;
@@ -59,12 +59,10 @@ CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camer
CameraDevice::~CameraDevice()
{
if (staticMetadata_)
free_camera_metadata(staticMetadata_);
staticMetadata_ = nullptr;
delete staticMetadata_;
if (requestTemplate_)
free_camera_metadata(requestTemplate_);
requestTemplate_ = nullptr;
delete requestTemplate_;
}
/*
@@ -117,10 +115,8 @@ void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)
*/
camera_metadata_t *CameraDevice::getStaticMetadata()
{
int ret;
if (staticMetadata_)
return staticMetadata_;
return staticMetadata_->get();
/*
* The here reported metadata are enough to implement a basic capture
@@ -130,18 +126,23 @@ camera_metadata_t *CameraDevice::getStaticMetadata()
/*
* \todo Keep this in sync with the actual number of entries.
* Currently: 46 entries, 390 bytes
* Currently: 47 entries, 390 bytes
*/
staticMetadata_ = allocate_camera_metadata(50, 500);
staticMetadata_ = new CameraMetadata(50, 500);
if (!staticMetadata_->isValid()) {
LOG(HAL, Error) << "Failed to allocate static metadata";
delete staticMetadata_;
staticMetadata_ = nullptr;
return nullptr;
}
/* Color correction static metadata. */
std::vector<uint8_t> aberrationModes = {
ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
aberrationModes.data(), aberrationModes.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
aberrationModes.data(),
aberrationModes.size());
/* Control static metadata. */
std::vector<uint8_t> aeAvailableAntiBandingModes = {
@@ -150,294 +151,228 @@ camera_metadata_t *CameraDevice::getStaticMetadata()
ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
aeAvailableAntiBandingModes.data(),
aeAvailableAntiBandingModes.size());
METADATA_ASSERT(ret);
std::vector<uint8_t> aeAvailableModes = {
ANDROID_CONTROL_AE_MODE_ON,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AE_AVAILABLE_MODES,
aeAvailableModes.data(), aeAvailableModes.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,
aeAvailableModes.data(),
aeAvailableModes.size());
std::vector<int32_t> availableAeFpsTarget = {
15, 30,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
availableAeFpsTarget.data(),
availableAeFpsTarget.size());
METADATA_ASSERT(ret);
std::vector<int32_t> aeCompensationRange = {
0, 0,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AE_COMPENSATION_RANGE,
staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
aeCompensationRange.data(),
aeCompensationRange.size());
METADATA_ASSERT(ret);
const camera_metadata_rational_t aeCompensationStep[] = {
{ 0, 1 }
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AE_COMPENSATION_STEP,
staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,
aeCompensationStep, 1);
METADATA_ASSERT(ret);
std::vector<uint8_t> availableAfModes = {
ANDROID_CONTROL_AF_MODE_OFF,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AF_AVAILABLE_MODES,
availableAfModes.data(), availableAfModes.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,
availableAfModes.data(),
availableAfModes.size());
std::vector<uint8_t> availableEffects = {
ANDROID_CONTROL_EFFECT_MODE_OFF,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AVAILABLE_EFFECTS,
availableEffects.data(), availableEffects.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,
availableEffects.data(),
availableEffects.size());
std::vector<uint8_t> availableSceneModes = {
ANDROID_CONTROL_SCENE_MODE_DISABLED,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
availableSceneModes.data(), availableSceneModes.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
availableSceneModes.data(),
availableSceneModes.size());
std::vector<uint8_t> availableStabilizationModes = {
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
availableStabilizationModes.data(),
availableStabilizationModes.size());
METADATA_ASSERT(ret);
std::vector<uint8_t> availableAwbModes = {
ANDROID_CONTROL_AWB_MODE_OFF,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AWB_AVAILABLE_MODES,
availableAwbModes.data(), availableAwbModes.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
availableAwbModes.data(),
availableAwbModes.size());
std::vector<int32_t> availableMaxRegions = {
0, 0, 0,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_MAX_REGIONS,
availableMaxRegions.data(), availableMaxRegions.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,
availableMaxRegions.data(),
availableMaxRegions.size());
std::vector<uint8_t> sceneModesOverride = {
ANDROID_CONTROL_AE_MODE_ON,
ANDROID_CONTROL_AWB_MODE_AUTO,
ANDROID_CONTROL_AF_MODE_AUTO,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
sceneModesOverride.data(), sceneModesOverride.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
sceneModesOverride.data(),
sceneModesOverride.size());
uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AE_LOCK_AVAILABLE,
staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
&aeLockAvailable, 1);
METADATA_ASSERT(ret);
uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
&awbLockAvailable, 1);
METADATA_ASSERT(ret);
char availableControlModes = ANDROID_CONTROL_MODE_AUTO;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_CONTROL_AVAILABLE_MODES,
staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,
&availableControlModes, 1);
METADATA_ASSERT(ret);
/* JPEG static metadata. */
std::vector<int32_t> availableThumbnailSizes = {
0, 0,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
availableThumbnailSizes.data(),
availableThumbnailSizes.size());
METADATA_ASSERT(ret);
/* Sensor static metadata. */
int32_t pixelArraySize[] = {
2592, 1944,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
&pixelArraySize, 2);
METADATA_ASSERT(ret);
int32_t sensorSizes[] = {
0, 0, 2560, 1920,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
&sensorSizes, 4);
METADATA_ASSERT(ret);
int32_t sensitivityRange[] = {
32, 2400,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
&sensitivityRange, 2);
METADATA_ASSERT(ret);
uint16_t filterArr = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
&filterArr, 1);
METADATA_ASSERT(ret);
int64_t exposureTimeRange[] = {
100000, 200000000,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
&exposureTimeRange, 2);
METADATA_ASSERT(ret);
int32_t orientation = 0;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SENSOR_ORIENTATION,
staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION,
&orientation, 1);
METADATA_ASSERT(ret);
std::vector<int32_t> testPatterModes = {
ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
testPatterModes.data(), testPatterModes.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
testPatterModes.data(),
testPatterModes.size());
std::vector<float> physicalSize = {
2592, 1944,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
physicalSize.data(), physicalSize.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
physicalSize.data(),
physicalSize.size());
uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
&timestampSource, 1);
METADATA_ASSERT(ret);
/* Statistics static metadata. */
uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
&faceDetectMode, 1);
METADATA_ASSERT(ret);
int32_t maxFaceCount = 0;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
&maxFaceCount, 1);
METADATA_ASSERT(ret);
/* Sync static metadata. */
int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
/* Flash static metadata. */
char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_FLASH_INFO_AVAILABLE,
staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,
&flashAvailable, 1);
METADATA_ASSERT(ret);
/* Lens static metadata. */
std::vector<float> lensApertures = {
2.53 / 100,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_LENS_INFO_AVAILABLE_APERTURES,
lensApertures.data(), lensApertures.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
lensApertures.data(),
lensApertures.size());
uint8_t lensFacing = ANDROID_LENS_FACING_FRONT;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_LENS_FACING, &lensFacing, 1);
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_LENS_FACING, &lensFacing, 1);
std::vector<float> lensFocalLenghts = {
1,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
lensFocalLenghts.data(),
lensFocalLenghts.size());
METADATA_ASSERT(ret);
std::vector<uint8_t> opticalStabilizations = {
ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
opticalStabilizations.data(),
opticalStabilizations.size());
METADATA_ASSERT(ret);
float hypeFocalDistance = 0;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
&hypeFocalDistance, 1);
METADATA_ASSERT(ret);
float minFocusDistance = 0;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
&minFocusDistance, 1);
METADATA_ASSERT(ret);
/* Noise reduction modes. */
uint8_t noiseReductionModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
&noiseReductionModes, 1);
METADATA_ASSERT(ret);
/* Scaler static metadata. */
float maxDigitalZoom = 1;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
&maxDigitalZoom, 1);
METADATA_ASSERT(ret);
std::vector<uint32_t> availableStreamFormats = {
ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SCALER_AVAILABLE_FORMATS,
staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_FORMATS,
availableStreamFormats.data(),
availableStreamFormats.size());
METADATA_ASSERT(ret);
std::vector<uint32_t> availableStreamConfigurations = {
ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920,
@@ -447,65 +382,58 @@ camera_metadata_t *CameraDevice::getStaticMetadata()
ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED, 2560, 1920,
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
availableStreamConfigurations.data(),
availableStreamConfigurations.size());
METADATA_ASSERT(ret);
std::vector<int64_t> availableStallDurations = {
ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
availableStallDurations.data(),
availableStallDurations.size());
METADATA_ASSERT(ret);
std::vector<int64_t> minFrameDurations = {
ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED, 2560, 1920, 33333333,
ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, 2560, 1920, 33333333,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
minFrameDurations.data(), minFrameDurations.size());
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
minFrameDurations.data(),
minFrameDurations.size());
uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
METADATA_ASSERT(ret);
staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
/* Info static metadata. */
uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
&supportedHWLevel, 1);
/* Request static metadata. */
int32_t partialResultCount = 1;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
&partialResultCount, 1);
METADATA_ASSERT(ret);
uint8_t maxPipelineDepth = 2;
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
&maxPipelineDepth, 1);
METADATA_ASSERT(ret);
std::vector<uint8_t> availableCapabilities = {
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
};
ret = add_camera_metadata_entry(staticMetadata_,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
availableCapabilities.data(),
availableCapabilities.size());
METADATA_ASSERT(ret);
return staticMetadata_;
if (!staticMetadata_->isValid()) {
LOG(HAL, Error) << "Failed to construct static metadata";
delete staticMetadata_;
staticMetadata_ = nullptr;
return nullptr;
}
return staticMetadata_->get();
}
/*
@@ -513,8 +441,6 @@ camera_metadata_t *CameraDevice::getStaticMetadata()
*/
const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)
{
int ret;
/*
* \todo Inspect type and pick the right metadata pack.
* As of now just use a single one for all templates.
@@ -545,89 +471,75 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)
}
if (requestTemplate_)
return requestTemplate_;
return requestTemplate_->get();
/*
* \todo Keep this in sync with the actual number of entries.
* Currently: 12 entries, 15 bytes
*/
requestTemplate_ = allocate_camera_metadata(15, 20);
requestTemplate_ = new CameraMetadata(15, 20);
if (!requestTemplate_) {
LOG(HAL, Error) << "Failed to allocate template metadata";
delete requestTemplate_;
requestTemplate_ = nullptr;
return nullptr;
}
uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_CONTROL_AE_MODE,
requestTemplate_->addEntry(ANDROID_CONTROL_AE_MODE,
&aeMode, 1);
METADATA_ASSERT(ret);
int32_t aeExposureCompensation = 0;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
requestTemplate_->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
&aeExposureCompensation, 1);
METADATA_ASSERT(ret);
uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
requestTemplate_->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
&aePrecaptureTrigger, 1);
METADATA_ASSERT(ret);
uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_CONTROL_AE_LOCK,
requestTemplate_->addEntry(ANDROID_CONTROL_AE_LOCK,
&aeLock, 1);
METADATA_ASSERT(ret);
uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_CONTROL_AF_TRIGGER,
requestTemplate_->addEntry(ANDROID_CONTROL_AF_TRIGGER,
&afTrigger, 1);
METADATA_ASSERT(ret);
uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_CONTROL_AWB_MODE,
requestTemplate_->addEntry(ANDROID_CONTROL_AWB_MODE,
&awbMode, 1);
METADATA_ASSERT(ret);
uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_CONTROL_AWB_LOCK,
requestTemplate_->addEntry(ANDROID_CONTROL_AWB_LOCK,
&awbLock, 1);
METADATA_ASSERT(ret);
uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_FLASH_MODE,
requestTemplate_->addEntry(ANDROID_FLASH_MODE,
&flashMode, 1);
METADATA_ASSERT(ret);
uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_STATISTICS_FACE_DETECT_MODE,
requestTemplate_->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
&faceDetectMode, 1);
METADATA_ASSERT(ret);
uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_NOISE_REDUCTION_MODE, &noiseReduction, 1);
METADATA_ASSERT(ret);
requestTemplate_->addEntry(ANDROID_NOISE_REDUCTION_MODE,
&noiseReduction, 1);
uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
requestTemplate_->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
&aberrationMode, 1);
METADATA_ASSERT(ret);
ret = add_camera_metadata_entry(requestTemplate_,
ANDROID_CONTROL_CAPTURE_INTENT,
requestTemplate_->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,
&captureIntent, 1);
METADATA_ASSERT(ret);
return requestTemplate_;
if (!requestTemplate_->isValid()) {
LOG(HAL, Error) << "Failed to construct request template";
delete requestTemplate_;
requestTemplate_ = nullptr;
return nullptr;
}
return requestTemplate_->get();
}
/*
@@ -799,7 +711,7 @@ void CameraDevice::requestComplete(Request *request,
{
Buffer *libcameraBuffer = buffers.begin()->second;
camera3_buffer_status status = CAMERA3_BUFFER_STATUS_OK;
camera_metadata_t *resultMetadata = nullptr;
std::unique_ptr<CameraMetadata> resultMetadata;
if (request->status() != Request::RequestComplete) {
LOG(HAL, Error) << "Request not succesfully completed: "
@@ -826,27 +738,30 @@
captureResult.output_buffers =
const_cast<const camera3_stream_buffer_t *>(descriptor->buffers);
if (status == CAMERA3_BUFFER_STATUS_ERROR) {
/* \todo Improve error handling. */
notifyError(descriptor->frameNumber,
descriptor->buffers[0].stream);
} else {
if (status == CAMERA3_BUFFER_STATUS_OK) {
notifyShutter(descriptor->frameNumber,
libcameraBuffer->timestamp());
captureResult.partial_result = 1;
resultMetadata = getResultMetadata(descriptor->frameNumber,
libcameraBuffer->timestamp());
captureResult.result = resultMetadata;
captureResult.result = resultMetadata->get();
}
if (status == CAMERA3_BUFFER_STATUS_ERROR || !captureResult.result) {
/* \todo Improve error handling. In case we notify an error
* because the metadata generation fails, a shutter event has
* already been notified for this frame number before the error
* is here signalled. Make sure the error path plays well with
* the camera stack state machine.
*/
notifyError(descriptor->frameNumber,
descriptor->buffers[0].stream);
}
callbacks_->process_capture_result(callbacks_, &captureResult);
delete descriptor;
if (resultMetadata)
free_camera_metadata(resultMetadata);
return;
}
void CameraDevice::notifyShutter(uint32_t frameNumber, uint64_t timestamp)
@@ -875,89 +790,71 @@ void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream)
/*
* Produce a set of fixed result metadata.
*/
camera_metadata_t *CameraDevice::getResultMetadata(int frame_number,
std::unique_ptr<CameraMetadata> CameraDevice::getResultMetadata(int frame_number,
int64_t timestamp)
{
int ret;
/*
* \todo Keep this in sync with the actual number of entries.
* Currently: 13 entries, 36 bytes
* Currently: 12 entries, 36 bytes
*/
camera_metadata_t *resultMetadata = allocate_camera_metadata(15, 50);
std::unique_ptr<CameraMetadata> resultMetadata =
utils::make_unique<CameraMetadata>(15, 50);
if (!resultMetadata->isValid()) {
LOG(HAL, Error) << "Failed to allocate static metadata";
return nullptr;
}
const uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
ret = add_camera_metadata_entry(resultMetadata, ANDROID_CONTROL_AE_STATE,
&ae_state, 1);
METADATA_ASSERT(ret);
resultMetadata->addEntry(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
ret = add_camera_metadata_entry(resultMetadata, ANDROID_CONTROL_AE_LOCK,
&ae_lock, 1);
METADATA_ASSERT(ret);
resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);
uint8_t af_state = ANDROID_CONTROL_AF_STATE_INACTIVE;
ret = add_camera_metadata_entry(resultMetadata, ANDROID_CONTROL_AF_STATE,
&af_state, 1);
METADATA_ASSERT(ret);
resultMetadata->addEntry(ANDROID_CONTROL_AF_STATE, &af_state, 1);
const uint8_t awb_state = ANDROID_CONTROL_AWB_STATE_CONVERGED;
ret = add_camera_metadata_entry(resultMetadata,
ANDROID_CONTROL_AWB_STATE,
&awb_state, 1);
METADATA_ASSERT(ret);
resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, &awb_state, 1);
const uint8_t awb_lock = ANDROID_CONTROL_AWB_LOCK_OFF;
ret = add_camera_metadata_entry(resultMetadata,
ANDROID_CONTROL_AWB_LOCK,
&awb_lock, 1);
METADATA_ASSERT(ret);
resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);
const uint8_t lens_state = ANDROID_LENS_STATE_STATIONARY;
ret = add_camera_metadata_entry(resultMetadata,
ANDROID_LENS_STATE,
&lens_state, 1);
METADATA_ASSERT(ret);
resultMetadata->addEntry(ANDROID_LENS_STATE, &lens_state, 1);
int32_t sensorSizes[] = {
0, 0, 2560, 1920,
};
ret = add_camera_metadata_entry(resultMetadata,
ANDROID_SCALER_CROP_REGION,
sensorSizes, 4);
METADATA_ASSERT(ret);
resultMetadata->addEntry(ANDROID_SCALER_CROP_REGION, sensorSizes, 4);
ret = add_camera_metadata_entry(resultMetadata,
ANDROID_SENSOR_TIMESTAMP,
&timestamp, 1);
METADATA_ASSERT(ret);
resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
/* 33.3 msec */
const int64_t rolling_shutter_skew = 33300000;
ret = add_camera_metadata_entry(resultMetadata,
ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
resultMetadata->addEntry(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
&rolling_shutter_skew, 1);
METADATA_ASSERT(ret);
/* 16.6 msec */
const int64_t exposure_time = 16600000;
ret = add_camera_metadata_entry(resultMetadata,
ANDROID_SENSOR_EXPOSURE_TIME,
resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME,
&exposure_time, 1);
METADATA_ASSERT(ret);
const uint8_t lens_shading_map_mode =
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
ret = add_camera_metadata_entry(resultMetadata,
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
resultMetadata->addEntry(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
&lens_shading_map_mode, 1);
METADATA_ASSERT(ret);
const uint8_t scene_flicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
ret = add_camera_metadata_entry(resultMetadata,
ANDROID_STATISTICS_SCENE_FLICKER,
resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER,
&scene_flicker, 1);
METADATA_ASSERT(ret);
/*
* Return the result metadata pack even if it is not valid: get() will
* return nullptr.
*/
if (!resultMetadata->isValid()) {
LOG(HAL, Error) << "Failed to construct result metadata";
}
return resultMetadata;
}
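
The ownership change in requestComplete() above is easiest to see in isolation: the result pack is now held by a std::unique_ptr<CameraMetadata>, so the camera_metadata_t it wraps stays alive across process_capture_result() and is freed automatically when the pointer goes out of scope, replacing the manual free_camera_metadata() call. The lines below are a trimmed paraphrase of the patch, not a literal excerpt.

	/* Trimmed paraphrase of the new requestComplete() tail. */
	std::unique_ptr<CameraMetadata> resultMetadata;

	if (status == CAMERA3_BUFFER_STATUS_OK) {
		resultMetadata = getResultMetadata(descriptor->frameNumber,
						   libcameraBuffer->timestamp());
		/* get() yields nullptr if metadata construction failed. */
		captureResult.result = resultMetadata->get();
	}

	callbacks_->process_capture_result(callbacks_, &captureResult);
	delete descriptor;
	/* resultMetadata is destroyed on return, freeing the metadata pack. */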

camera_device.h

@@ -19,13 +19,7 @@
#include "message.h"
#define METADATA_ASSERT(_r) \
do { \
if (!(_r)) break; \
LOG(HAL, Error) << "Error: " << __func__ << ":" << __LINE__; \
return nullptr; \
} while(0);
class CameraMetadata;
class ThreadRpc;
class CameraDevice : public libcamera::Object
@@ -59,14 +53,15 @@ private:
void notifyShutter(uint32_t frameNumber, uint64_t timestamp);
void notifyError(uint32_t frameNumber, camera3_stream_t *stream);
camera_metadata_t *getResultMetadata(int frame_number, int64_t timestamp);
std::unique_ptr<CameraMetadata> getResultMetadata(int frame_number,
int64_t timestamp);
bool running_;
std::shared_ptr<libcamera::Camera> camera_;
std::unique_ptr<libcamera::CameraConfiguration> config_;
camera_metadata_t *staticMetadata_;
camera_metadata_t *requestTemplate_;
CameraMetadata *staticMetadata_;
CameraMetadata *requestTemplate_;
const camera3_callback_ops_t *callbacks_;
};
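
For comparison, a condensed sketch of the error-handling idiom the patch moves to: the removed METADATA_ASSERT() macro checked the return value of every add_camera_metadata_entry() call and bailed out immediately, whereas the helper latches failures internally so a single isValid() test after the last addEntry() suffices. buildMetadata() is a hypothetical stand-in, assuming the patch's own headers and the utils::make_unique() helper it already uses.

#include <memory>

#include <system/camera_metadata.h>

#include "camera_metadata.h"
#include "utils.h"

/* Hypothetical, condensed illustration of the new idiom. */
std::unique_ptr<CameraMetadata> buildMetadata()
{
	std::unique_ptr<CameraMetadata> metadata =
		libcamera::utils::make_unique<CameraMetadata>(15, 50);
	if (!metadata->isValid())
		return nullptr;	/* Allocating the backing pack failed. */

	uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
	metadata->addEntry(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
	/* ... further addEntry() calls, with no per-call return checks ... */

	/* Failures from any addEntry() are latched; verify once at the end. */
	if (!metadata->isValid())
		return nullptr;

	return metadata;
}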