libcamera: pipeline: raspberrypi: Add StreamFormats to StreamConfiguration

In generateConfiguration(), add the device node specific formats to the
StreamConfiguration for each StreamRole requested.

Signed-off-by: Naushir Patuck <naush@raspberrypi.com>
Reviewed-by: Jacopo Mondi <jacopo@jmondi.org>
Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Author:    Naushir Patuck <naush@raspberrypi.com>
Date:      2020-06-25 08:28:44 +01:00
Committer: Laurent Pinchart
Parent:    23e15e72f9
Commit:    0396380614


@@ -518,41 +518,45 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 	RPiCameraData *data = cameraData(camera);
 	CameraConfiguration *config = new RPiCameraConfiguration(data);
 	V4L2DeviceFormat sensorFormat;
+	unsigned int bufferCount;
+	PixelFormat pixelFormat;
 	V4L2PixFmtMap fmts;
+	Size size;
 
 	if (roles.empty())
 		return config;
 
 	for (const StreamRole role : roles) {
-		StreamConfiguration cfg{};
-
 		switch (role) {
 		case StreamRole::StillCaptureRaw:
-			cfg.size = data->sensor_->resolution();
+			size = data->sensor_->resolution();
 			fmts = data->unicam_[Unicam::Image].dev()->formats();
-			sensorFormat = findBestMode(fmts, cfg.size);
-			cfg.pixelFormat = sensorFormat.fourcc.toPixelFormat();
-			ASSERT(cfg.pixelFormat.isValid());
-			cfg.bufferCount = 1;
+			sensorFormat = findBestMode(fmts, size);
+			pixelFormat = sensorFormat.fourcc.toPixelFormat();
+			ASSERT(pixelFormat.isValid());
+			bufferCount = 1;
 			break;
 
 		case StreamRole::StillCapture:
-			cfg.pixelFormat = formats::NV12;
+			fmts = data->isp_[Isp::Output0].dev()->formats();
+			pixelFormat = formats::NV12;
 			/* Return the largest sensor resolution. */
-			cfg.size = data->sensor_->resolution();
-			cfg.bufferCount = 1;
+			size = data->sensor_->resolution();
+			bufferCount = 1;
 			break;
 
 		case StreamRole::VideoRecording:
-			cfg.pixelFormat = formats::NV12;
-			cfg.size = { 1920, 1080 };
-			cfg.bufferCount = 4;
+			fmts = data->isp_[Isp::Output0].dev()->formats();
+			pixelFormat = formats::NV12;
+			size = { 1920, 1080 };
+			bufferCount = 4;
 			break;
 
 		case StreamRole::Viewfinder:
-			cfg.pixelFormat = formats::ARGB8888;
-			cfg.size = { 800, 600 };
-			cfg.bufferCount = 4;
+			fmts = data->isp_[Isp::Output0].dev()->formats();
+			pixelFormat = formats::ARGB8888;
+			size = { 800, 600 };
+			bufferCount = 4;
 			break;
 
 		default:
@@ -561,6 +565,22 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			break;
 		}
 
+		/* Translate the V4L2PixelFormat to PixelFormat. */
+		std::map<PixelFormat, std::vector<SizeRange>> deviceFormats;
+		std::transform(fmts.begin(), fmts.end(), std::inserter(deviceFormats, deviceFormats.end()),
+			       [&](const decltype(fmts)::value_type &format) {
+					return decltype(deviceFormats)::value_type{
+						format.first.toPixelFormat(),
+						format.second
+					};
+			       });
+
+		/* Add the stream format based on the device node used for the use case. */
+		StreamFormats formats(deviceFormats);
+		StreamConfiguration cfg(formats);
+		cfg.size = size;
+		cfg.pixelFormat = pixelFormat;
+		cfg.bufferCount = bufferCount;
 		config->addConfiguration(cfg);
 	}
 
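
With the StreamFormats attached, applications can query which pixel formats and sizes the underlying device node supports for each requested role. The following is a minimal, hypothetical application-side sketch (not part of this commit), written against the public libcamera API of this period; the StreamConfiguration::formats(), StreamFormats::pixelformats() and StreamFormats::sizes() accessors are the public interfaces, but exact headers and return types may vary between libcamera versions.

/*
 * Hypothetical application-side sketch, not part of this commit: enumerate
 * the device-node formats now carried by the generated StreamConfiguration.
 */
#include <iostream>
#include <memory>

#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>
#include <libcamera/geometry.h>
#include <libcamera/pixel_format.h>
#include <libcamera/stream.h>

using namespace libcamera;

int main()
{
	CameraManager manager;
	if (manager.start())
		return 1;

	if (manager.cameras().empty()) {
		manager.stop();
		return 1;
	}

	std::shared_ptr<Camera> camera = manager.cameras().front();

	/* Ask the pipeline handler for a default Viewfinder configuration. */
	auto config = camera->generateConfiguration({ StreamRole::Viewfinder });
	if (!config || config->empty()) {
		manager.stop();
		return 1;
	}

	const StreamConfiguration &cfg = config->at(0);

	/* List every PixelFormat and the sizes the device node supports for it. */
	for (const PixelFormat &pixelFormat : cfg.formats().pixelformats()) {
		std::cout << pixelFormat.toString() << ":";
		for (const Size &size : cfg.formats().sizes(pixelFormat))
			std::cout << " " << size.toString();
		std::cout << std::endl;
	}

	manager.stop();
	return 0;
}

With this patch, the StillCapture, VideoRecording and Viewfinder roles report the formats of the ISP Output0 node, while StillCaptureRaw reports the formats of the Unicam image node.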