android: post_processor: Change the type of destination in process()

The destination buffer in PostProcessor::process() is currently passed
as a libcamera::Span. A Span describes a single contiguous
(one-dimensional) buffer, e.g. a JPEG blob, but the destination can be
a multi-planar buffer, e.g. a YUV frame. Change the type of the
destination buffer to libcamera::MappedBuffer so that each plane can be
addressed individually.

Signed-off-by: Hirokazu Honda <hiroh@chromium.org>
Reviewed-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Reviewed-by: Jacopo Mondi <jacopo@jmondi.org>
Signed-off-by: Jacopo Mondi <jacopo@jmondi.org>
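
To illustrate what the new interface allows, here is a minimal, hypothetical post-processor (not part of this change) that touches every plane of the destination. The class name PostProcessorNull and its trivial behaviour are invented for the example; PostProcessor, libcamera::MappedBuffer::maps() and libcamera::Span<uint8_t> are the interfaces used by the patch below.

/*
 * Hypothetical example, not part of this commit: a post-processor that
 * simply clears every plane of its destination buffer.
 */
#include <cstring>

#include <libcamera/buffer.h>
#include <libcamera/span.h>
#include <libcamera/stream.h>

#include <libcamera/internal/buffer.h>

#include "post_processor.h"

class PostProcessorNull : public PostProcessor
{
public:
        int configure(const libcamera::StreamConfiguration &inCfg,
                      const libcamera::StreamConfiguration &outCfg) override
        {
                /* Nothing to configure for this trivial example. */
                return 0;
        }

        int process(const libcamera::FrameBuffer &source,
                    libcamera::MappedBuffer *destination,
                    const CameraMetadata &requestMetadata,
                    CameraMetadata *resultMetadata) override
        {
                /*
                 * A MappedBuffer exposes one Span<uint8_t> per mapped plane,
                 * so a multi-planar destination (e.g. a YUV frame) can be
                 * addressed plane by plane, which a single Span<uint8_t>
                 * argument could not express.
                 */
                for (libcamera::Span<uint8_t> plane : destination->maps())
                        std::memset(plane.data(), 0, plane.size());

                /* The source frame and the metadata are ignored here. */
                return 0;
        }
};

The JPEG post-processor changed below keeps writing to a single plane, destination->maps()[0], which is the blob buffer provided by the camera service.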
Hirokazu Honda, 2021-01-28 22:42:15 +00:00, committed by Jacopo Mondi
parent bc6440792b
commit fb9051ff74
5 changed files with 13 additions and 10 deletions

src/android/camera_stream.cpp

@@ -96,14 +96,14 @@ int CameraStream::configure()
 }
 
 int CameraStream::process(const libcamera::FrameBuffer &source,
-                          MappedCamera3Buffer *dest,
+                          libcamera::MappedBuffer *destination,
                           const CameraMetadata &requestMetadata,
                           CameraMetadata *resultMetadata)
 {
         if (!postProcessor_)
                 return 0;
 
-        return postProcessor_->process(source, dest->maps()[0],
+        return postProcessor_->process(source, destination,
                                        requestMetadata, resultMetadata);
 }
 

src/android/camera_stream.h

@@ -19,9 +19,10 @@
 #include <libcamera/geometry.h>
 #include <libcamera/pixel_format.h>
 
+#include <libcamera/internal/buffer.h>
+
 class CameraDevice;
 class CameraMetadata;
-class MappedCamera3Buffer;
 class PostProcessor;
 
 class CameraStream
@@ -119,7+120,7 @@ public:
         int configure();
 
         int process(const libcamera::FrameBuffer &source,
-                    MappedCamera3Buffer *dest,
+                    libcamera::MappedBuffer *destination,
                     const CameraMetadata &requestMetadata,
                     CameraMetadata *resultMetadata);
         libcamera::FrameBuffer *getBuffer();

src/android/jpeg/post_processor_jpeg.cpp

@@ -83,7 +83,7 @@ void PostProcessorJpeg::generateThumbnail(const FrameBuffer &source,
 }
 
 int PostProcessorJpeg::process(const FrameBuffer &source,
-                               Span<uint8_t> destination,
+                               libcamera::MappedBuffer *destination,
                                const CameraMetadata &requestMetadata,
                                CameraMetadata *resultMetadata)
 {
@@ -172,7 +172,8 @@ int PostProcessorJpeg::process(const FrameBuffer &source,
         const uint8_t quality = ret ? *entry.data.u8 : 95;
         resultMetadata->addEntry(ANDROID_JPEG_QUALITY, &quality, 1);
 
-        int jpeg_size = encoder_->encode(source, destination, exif.data(), quality);
+        int jpeg_size = encoder_->encode(source, destination->maps()[0],
+                                         exif.data(), quality);
         if (jpeg_size < 0) {
                 LOG(JPEG, Error) << "Failed to encode stream image";
                 return jpeg_size;
@@ -190,7 +191,7 @@ int PostProcessorJpeg::process(const FrameBuffer &source,
          * \todo Investigate if the buffer size mismatch is an issue or
          * expected behaviour.
          */
-        uint8_t *resultPtr = destination.data() +
+        uint8_t *resultPtr = destination->maps()[0].data() +
                              cameraDevice_->maxJpegBufferSize() -
                              sizeof(struct camera3_jpeg_blob);
         auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);

src/android/jpeg/post_processor_jpeg.h

@@ -25,7 +25,7 @@ public:
         int configure(const libcamera::StreamConfiguration &incfg,
                       const libcamera::StreamConfiguration &outcfg) override;
         int process(const libcamera::FrameBuffer &source,
-                    libcamera::Span<uint8_t> destination,
+                    libcamera::MappedBuffer *destination,
                     const CameraMetadata &requestMetadata,
                     CameraMetadata *resultMetadata) override;
 

src/android/post_processor.h

@@ -8,9 +8,10 @@
 #define __ANDROID_POST_PROCESSOR_H__
 
 #include <libcamera/buffer.h>
-#include <libcamera/span.h>
 #include <libcamera/stream.h>
 
+#include <libcamera/internal/buffer.h>
+
 class CameraMetadata;
 
 class PostProcessor
@@ -21,7 +22,7 @@ public:
         virtual int configure(const libcamera::StreamConfiguration &inCfg,
                               const libcamera::StreamConfiguration &outCfg) = 0;
         virtual int process(const libcamera::FrameBuffer &source,
-                            libcamera::Span<uint8_t> destination,
+                            libcamera::MappedBuffer *destination,
                             const CameraMetadata &requestMetadata,
                             CameraMetadata *resultMetadata) = 0;
 };