libcamera: rpi: Simplify validate() and configure() for RAW streams

This commit simplifies the validate() and configure() calls in the
pipeline handler in a number of ways:

- Only pass the RPiCameraConfiguration structure into platformValidate()
  and platformConfigure().

- Determine the V4L2DeviceFormat structure fields for all streams in
  validate(), cache them, and reuse them in configure() instead of
  regenerating the structures multiple times.

- Use the recently added updateStreamConfig() and toV4L2DeviceFormat()
  helpers to populate fields in the V4L2DeviceFormat and
  StreamConfiguration structures to reduce code duplication.

Signed-off-by: Naushir Patuck <naush@raspberrypi.com>
Reviewed-by: Jacopo Mondi <jacopo.mondi@ideasonboard.com>
Signed-off-by: Jacopo Mondi <jacopo.mondi@ideasonboard.com>
Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Author: Naushir Patuck, 2023-09-21 18:55:47 +02:00 (committed by Laurent Pinchart)
commit 723c0abb40, parent 71bbf10a2f
3 changed files with 34 additions and 54 deletions
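
Before the diff, a minimal sketch of the flow this commit moves to may help: validate() builds and caches one V4L2DeviceFormat per RAW stream and lets the video device adjust it with tryFormat(), and configure() later reuses the cached result instead of rebuilding it. The types and helpers below (StreamParams, toV4L2DeviceFormat(), updateStreamConfig()) are reduced stand-ins with assumed signatures, not the actual libcamera definitions; the real changes follow in the diff.

/*
 * Illustrative sketch only: stand-in types with assumed shapes, not the
 * libcamera implementation. It mirrors the validate()/configure() split
 * described in the commit message.
 */
#include <vector>

struct V4L2DeviceFormat { unsigned int fourcc = 0, width = 0, height = 0; };
struct StreamConfiguration { unsigned int pixelFormat = 0, width = 0, height = 0; };

struct V4L2VideoDevice {
    /* The driver may adjust the requested format in place; returns 0 on success. */
    int tryFormat(V4L2DeviceFormat *) { return 0; }
    int setFormat(V4L2DeviceFormat *) { return 0; }
};

/* Hypothetical equivalents of the helpers named in the commit message. */
static V4L2DeviceFormat toV4L2DeviceFormat(const StreamConfiguration *cfg)
{
    return { cfg->pixelFormat, cfg->width, cfg->height };
}

static bool updateStreamConfig(StreamConfiguration *cfg, const V4L2DeviceFormat &fmt)
{
    bool adjusted = cfg->pixelFormat != fmt.fourcc ||
                    cfg->width != fmt.width || cfg->height != fmt.height;
    cfg->pixelFormat = fmt.fourcc;
    cfg->width = fmt.width;
    cfg->height = fmt.height;
    return adjusted;
}

struct StreamParams {
    StreamConfiguration *cfg = nullptr;
    V4L2VideoDevice *dev = nullptr;
    V4L2DeviceFormat format;    /* filled by validate(), reused by configure() */
};

enum Status { Valid, Adjusted, Invalid };

/* validate(): compute the per-stream V4L2 format once and cache it. */
Status validate(std::vector<StreamParams> &rawStreams)
{
    Status status = Valid;

    for (StreamParams &raw : rawStreams) {
        raw.format = toV4L2DeviceFormat(raw.cfg);
        if (raw.dev->tryFormat(&raw.format))
            return Invalid;

        /* Fold any driver adjustment back into the stream configuration. */
        if (updateStreamConfig(raw.cfg, raw.format))
            status = Adjusted;
    }

    return status;
}

/* configure(): apply the cached format instead of regenerating it. */
int configure(std::vector<StreamParams> &rawStreams)
{
    for (StreamParams &raw : rawStreams)
        if (int ret = raw.dev->setFormat(&raw.format))
            return ret;

    return 0;
}

The point of this shape is that tryFormat() runs once per RAW stream during validation, and configure() can no longer drift from what validation negotiated.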


@@ -238,16 +238,10 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 
     /* Further fixups on the RAW streams. */
     for (auto &raw : rawStreams_) {
-        StreamConfiguration &cfg = config_.at(raw.index);
-        V4L2DeviceFormat rawFormat;
-        rawFormat.fourcc = raw.dev->toV4L2PixelFormat(cfg.pixelFormat);
-        rawFormat.size = cfg.size;
-        rawFormat.colorSpace = cfg.colorSpace;
-
-        int ret = raw.dev->tryFormat(&rawFormat);
+        int ret = raw.dev->tryFormat(&raw.format);
         if (ret)
             return Invalid;
 
         /*
          * Some sensors change their Bayer order when they are h-flipped
          * or v-flipped, according to the transform. If this one does, we
@@ -255,23 +249,15 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
          * Note how we must fetch the "native" (i.e. untransformed) Bayer
          * order, because the sensor may currently be flipped!
          */
-        V4L2PixelFormat fourcc = rawFormat.fourcc;
+        BayerFormat bayer = BayerFormat::fromPixelFormat(raw.cfg->pixelFormat);
         if (data_->flipsAlterBayerOrder_) {
-            BayerFormat bayer = BayerFormat::fromV4L2PixelFormat(fourcc);
             bayer.order = data_->nativeBayerOrder_;
             bayer = bayer.transform(combinedTransform_);
-            fourcc = bayer.toV4L2PixelFormat();
         }
+        raw.cfg->pixelFormat = bayer.toPixelFormat();
 
-        PixelFormat inputPixFormat = fourcc.toPixelFormat();
-        if (raw.cfg->size != rawFormat.size || raw.cfg->pixelFormat != inputPixFormat) {
-            raw.cfg->size = rawFormat.size;
-            raw.cfg->pixelFormat = inputPixFormat;
+        if (RPi::PipelineHandlerBase::updateStreamConfig(raw.cfg, raw.format))
             status = Adjusted;
-        }
-
-        raw.cfg->stride = rawFormat.planes[0].bpl;
-        raw.cfg->frameSize = rawFormat.planes[0].size;
     }
 
     /* Further fixups on the ISP output streams. */
@@ -548,35 +534,25 @@ int PipelineHandlerBase::configure(Camera *camera, CameraConfiguration *config)
      * If the application has provided a sensor configuration apply it
      * instead of just applying a format.
      */
-    const RPiCameraConfiguration *rpiConfig =
-        static_cast<const RPiCameraConfiguration *>(config);
-    V4L2SubdeviceFormat sensorFormat;
+    RPiCameraConfiguration *rpiConfig = static_cast<RPiCameraConfiguration *>(config);
+    V4L2SubdeviceFormat *sensorFormat = &rpiConfig->sensorFormat_;
 
     if (rpiConfig->sensorConfig) {
         ret = data->sensor_->applyConfiguration(*rpiConfig->sensorConfig,
                                                 rpiConfig->combinedTransform_,
-                                                &sensorFormat);
+                                                sensorFormat);
     } else {
-        sensorFormat = rpiConfig->sensorFormat_;
-        ret = data->sensor_->setFormat(&sensorFormat,
+        ret = data->sensor_->setFormat(sensorFormat,
                                        rpiConfig->combinedTransform_);
     }
 
     if (ret)
         return ret;
 
-    /* Use the user requested packing/bit-depth. */
-    std::optional<BayerFormat::Packing> packing;
-    if (!rpiConfig->rawStreams_.empty()) {
-        BayerFormat bayerFormat =
-            BayerFormat::fromPixelFormat(rpiConfig->rawStreams_[0].cfg->pixelFormat);
-        packing = bayerFormat.packing;
-    }
-
     /*
      * Platform specific internal stream configuration. This also assigns
      * external streams which get configured below.
      */
-    ret = data->platformConfigure(sensorFormat, packing, rpiConfig);
+    ret = data->platformConfigure(rpiConfig);
     if (ret)
         return ret;
@@ -640,11 +616,11 @@ int PipelineHandlerBase::configure(Camera *camera, CameraConfiguration *config)
      */
     link->setEnabled(true);
     const MediaPad *sinkPad = link->sink();
-    ret = device->setFormat(sinkPad->index(), &sensorFormat);
+    ret = device->setFormat(sinkPad->index(), sensorFormat);
     if (ret) {
         LOG(RPI, Error) << "Failed to set format on " << device->entity()->name()
                         << " pad " << sinkPad->index()
-                        << " with format " << sensorFormat
+                        << " with format " << *sensorFormat
                         << ": " << ret;
         return ret;
     }


@@ -58,9 +58,7 @@ public:
     }
 
     virtual CameraConfiguration::Status platformValidate(RPiCameraConfiguration *rpiConfig) const = 0;
-    virtual int platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
-                                  std::optional<BayerFormat::Packing> packing,
-                                  const RPiCameraConfiguration *rpiConfig) = 0;
+    virtual int platformConfigure(const RPiCameraConfiguration *rpiConfig) = 0;
 
     virtual void platformStart() = 0;
     virtual void platformStop() = 0;
@@ -269,6 +267,7 @@ public:
         unsigned int index;
         StreamConfiguration *cfg;
         V4L2VideoDevice *dev;
+        V4L2DeviceFormat format;
     };
 
     std::vector<StreamParams> rawStreams_;


@@ -115,9 +115,7 @@ private:
         isp_[Isp::Input].dev()->setSelection(V4L2_SEL_TGT_CROP, &ispCrop_);
     }
 
-    int platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
-                          std::optional<BayerFormat::Packing> packing,
-                          const RPi::RPiCameraConfiguration *rpiConfig) override;
+    int platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig) override;
     int platformConfigureIpa(ipa::RPi::ConfigParams &params) override;
 
     int platformInitIpa([[maybe_unused]] ipa::RPi::InitParams &params) override
@@ -416,6 +414,9 @@ CameraConfiguration::Status Vc4CameraData::platformValidate(RPi::RPiCameraConfig
         /* Handle flips to make sure to match the RAW stream format. */
         if (flipsAlterBayerOrder_)
             rawBayer = rawBayer.transform(rpiConfig->combinedTransform_);
 
+        /* Apply the user requested packing. */
+        rawBayer.packing = BayerFormat::fromPixelFormat(rawStream->pixelFormat).packing;
+
         PixelFormat rawFormat = rawBayer.toPixelFormat();
 
         if (rawStream->pixelFormat != rawFormat ||
@@ -425,6 +426,9 @@ CameraConfiguration::Status Vc4CameraData::platformValidate(RPi::RPiCameraConfig
             status = CameraConfiguration::Adjusted;
         }
+
+        rawStreams[0].format =
+            RPi::PipelineHandlerBase::toV4L2DeviceFormat(unicam_[Unicam::Image].dev(), rawStream);
     }
 
     /*
@@ -503,23 +507,14 @@ int Vc4CameraData::platformPipelineConfigure(const std::unique_ptr<YamlObject> &
     return 0;
 }
 
-int Vc4CameraData::platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
-                                     std::optional<BayerFormat::Packing> packing,
-                                     const RPi::RPiCameraConfiguration *rpiConfig)
+int Vc4CameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig)
 {
     const std::vector<StreamParams> &rawStreams = rpiConfig->rawStreams_;
     const std::vector<StreamParams> &outStreams = rpiConfig->outStreams_;
     int ret;
 
-    if (!packing)
-        packing = BayerFormat::Packing::CSI2;
-
     V4L2VideoDevice *unicam = unicam_[Unicam::Image].dev();
-    V4L2DeviceFormat unicamFormat = RPi::PipelineHandlerBase::toV4L2DeviceFormat(unicam, sensorFormat, *packing);
-
-    ret = unicam->setFormat(&unicamFormat);
-    if (ret)
-        return ret;
+    V4L2DeviceFormat unicamFormat;
 
     /*
      * See which streams are requested, and route the user
@@ -528,14 +523,24 @@ int Vc4CameraData::platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
     if (!rawStreams.empty()) {
         rawStreams[0].cfg->setStream(&unicam_[Unicam::Image]);
         unicam_[Unicam::Image].setFlags(StreamFlag::External);
+        unicamFormat = rawStreams[0].format;
+    } else {
+        unicamFormat =
+            RPi::PipelineHandlerBase::toV4L2DeviceFormat(unicam,
                                                          rpiConfig->sensorFormat_,
                                                          BayerFormat::Packing::CSI2);
     }
 
+    ret = unicam->setFormat(&unicamFormat);
+    if (ret)
+        return ret;
+
     ret = isp_[Isp::Input].dev()->setFormat(&unicamFormat);
     if (ret)
         return ret;
 
     LOG(RPI, Info) << "Sensor: " << sensor_->id()
-                   << " - Selected sensor format: " << sensorFormat
+                   << " - Selected sensor format: " << rpiConfig->sensorFormat_
                    << " - Selected unicam format: " << unicamFormat;
 
     /* Use a sensible small default size if no output streams are configured. */