android: capabilities: Use a throw-away config for YUV stream building
When building the list of supported YUV streams in getYUVResolutions(), the CameraConfiguration provided by the caller as first parameter is used. As that CameraConfiguration will later actually be applied to the camera, avoid any possible overlap of configuration parameters by using a throw-away CameraConfiguration, generated for the Viewfinder stream role, inside getYUVResolutions().

It is also nicer to avoid having two functions with a similar purpose, getYUVResolutions() and getRawResolutions(), take different parameter lists, as the presence of a CameraConfiguration as first parameter might confuse the reader.

Signed-off-by: Jacopo Mondi <jacopo@jmondi.org>
Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Reviewed-by: Umang Jain <umang.jain@ideasonboard.com>
parent 9c2f6b973c
commit e1d43481b9

2 changed files with 6 additions and 7 deletions
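As a reading aid, here is a minimal sketch of the throw-away-configuration pattern the commit adopts. It is not the HAL code itself: the free function probeYUVSizes() and its parameters are illustrative, and only the libcamera calls visible in the diff below (generateConfiguration(), CameraConfiguration::validate(), StreamConfiguration::pixelFormat/size) are taken from the change. The idea is that the configuration is generated purely to probe the camera and is never applied, so it cannot interfere with the configuration the caller later applies.

/*
 * Sketch only: probe which of the candidate sizes the camera accepts
 * for a given YUV pixel format, using a configuration that is never
 * applied to the camera.
 */
#include <memory>
#include <vector>

#include <libcamera/camera.h>
#include <libcamera/geometry.h>
#include <libcamera/pixel_format.h>
#include <libcamera/stream.h>

using namespace libcamera;

std::vector<Size> probeYUVSizes(Camera *camera, const PixelFormat &pixelFormat,
                                const std::vector<Size> &candidates)
{
	std::vector<Size> supported;

	/* Throw-away configuration for the Viewfinder role. */
	std::unique_ptr<CameraConfiguration> config =
		camera->generateConfiguration({ StreamRole::Viewfinder });
	if (!config)
		return supported;

	StreamConfiguration &cfg = config->at(0);

	for (const Size &size : candidates) {
		cfg.pixelFormat = pixelFormat;
		cfg.size = size;

		/*
		 * validate() may adjust the configuration to the closest
		 * supported one; only sizes that come back Valid, i.e.
		 * unmodified, are reported as supported.
		 */
		if (config->validate() == CameraConfiguration::Valid)
			supported.push_back(size);
	}

	return supported;
}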
src/android/camera_capabilities.cpp

@@ -138,13 +138,14 @@ int CameraCapabilities::initialize(std::shared_ptr<libcamera::Camera> camera,
 	return initializeStaticMetadata();
 }
 
-std::vector<Size> CameraCapabilities::getYUVResolutions(CameraConfiguration *cameraConfig,
-							 const PixelFormat &pixelFormat,
+std::vector<Size> CameraCapabilities::getYUVResolutions(const PixelFormat &pixelFormat,
 							 const std::vector<Size> &resolutions)
 {
 	std::vector<Size> supportedResolutions;
-
+	std::unique_ptr<CameraConfiguration> cameraConfig =
+		camera_->generateConfiguration({ StreamRole::Viewfinder });
 	StreamConfiguration &cfg = cameraConfig->at(0);
+
 	for (const Size &res : resolutions) {
 		cfg.pixelFormat = pixelFormat;
 		cfg.size = res;

@@ -324,8 +325,7 @@ int CameraCapabilities::initializeStreamConfigurations()
 		if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
 			resolutions = getRawResolutions(mappedFormat);
 		else
-			resolutions = getYUVResolutions(cameraConfig.get(),
-							mappedFormat,
+			resolutions = getYUVResolutions(mappedFormat,
 							cameraResolutions);
 
 		for (const Size &res : resolutions) {

src/android/camera_capabilities.h

@@ -43,8 +43,7 @@ private:
 	};
 
 	std::vector<libcamera::Size>
-	getYUVResolutions(libcamera::CameraConfiguration *cameraConfig,
-			  const libcamera::PixelFormat &pixelFormat,
+	getYUVResolutions(const libcamera::PixelFormat &pixelFormat,
 			  const std::vector<libcamera::Size> &resolutions);
 	std::vector<libcamera::Size>
 	getRawResolutions(const libcamera::PixelFormat &pixelFormat);