qcam: viewfinder_gl: Take color space into account for YUV rendering

Update the YUV shaders and the viewfinder_gl to correctly take the
Y'CbCr encoding and the quantization range into account when rendering
YUV formats to RGB. Support for the primaries and transfer function will
be added in a subsequent step.
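
For reference, the conversion implemented by the shaders now has the
generic form rgb = M * (yuv - offset), with the matrix M and the luma
offset selected from the Y'CbCr encoding and the quantization range.
As a worked example (values rounded), the BT.601 limited-range
constants that were previously hard-coded in the shaders fall out of
the full-range Rec601 coefficients once the limited-range scaling is
applied:

  R' = 1.164 * (Y' - 16/255) + 1.596 * (Cr' - 0.5)
  G' = 1.164 * (Y' - 16/255) - 0.392 * (Cb' - 0.5) - 0.813 * (Cr' - 0.5)
  B' = 1.164 * (Y' - 16/255) + 2.017 * (Cb' - 0.5)

  with 1.164 = 255/219, 1.596 = 1.4020 * 255/224, 0.392 = 0.3441 * 255/224,
  0.813 = 0.7141 * 255/224 and 2.017 = 1.7720 * 255/224.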

Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Reviewed-by: Umang Jain <umang.jain@ideasonboard.com>
Reviewed-by: Kunal Agarwal <kunalagarwal1072002@gmail.com>
Laurent Pinchart 2022-08-28 03:40:25 +03:00
parent ee4681b7e8
commit 251f0534b7
5 changed files with 115 additions and 33 deletions

src/qcam/assets/shader/YUV_2_planes.frag

@@ -13,27 +13,30 @@ varying vec2 textureOut;
 uniform sampler2D tex_y;
 uniform sampler2D tex_u;
 
+const mat3 yuv2rgb_matrix = mat3(
+	YUV2RGB_MATRIX
+);
+const vec3 yuv2rgb_offset = vec3(
+	YUV2RGB_Y_OFFSET / 255.0, 128.0 / 255.0, 128.0 / 255.0
+);
+
 void main(void)
 {
 	vec3 yuv;
-	vec3 rgb;
-	mat3 yuv2rgb_bt601_mat = mat3(
-		vec3(1.164, 1.164, 1.164),
-		vec3(0.000, -0.392, 2.017),
-		vec3(1.596, -0.813, 0.000)
-	);
 
-	yuv.x = texture2D(tex_y, textureOut).r - 0.063;
+	yuv.x = texture2D(tex_y, textureOut).r;
 #if defined(YUV_PATTERN_UV)
-	yuv.y = texture2D(tex_u, textureOut).r - 0.500;
-	yuv.z = texture2D(tex_u, textureOut).a - 0.500;
+	yuv.y = texture2D(tex_u, textureOut).r;
+	yuv.z = texture2D(tex_u, textureOut).a;
 #elif defined(YUV_PATTERN_VU)
-	yuv.y = texture2D(tex_u, textureOut).a - 0.500;
-	yuv.z = texture2D(tex_u, textureOut).r - 0.500;
+	yuv.y = texture2D(tex_u, textureOut).a;
+	yuv.z = texture2D(tex_u, textureOut).r;
 #else
 #error Invalid pattern
 #endif
 
-	rgb = yuv2rgb_bt601_mat * yuv;
+	vec3 rgb = yuv2rgb_matrix * (yuv - yuv2rgb_offset);
 	gl_FragColor = vec4(rgb, 1.0);
 }

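The YUV2RGB_MATRIX and YUV2RGB_Y_OFFSET macros are not defined in the shader
sources themselves; they are emitted as "#define" strings by
ViewFinderGL::selectColorSpace() (see the viewfinder_gl.cpp hunk below) and
prepended to the shader source before compilation. That plumbing is not part
of this diff; a minimal sketch of the step, assuming Qt's QOpenGLShader API
and member names such as fragmentShaderFile_ and fragmentShader_ that do not
appear in this diff, could look like:

	QFile file(fragmentShaderFile_);
	if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
		return false;

	/* Prepend the generated #define lines to the shader source. */
	QByteArray src = file.readAll();
	src.prepend(fragmentShaderDefines_.join('\n').toUtf8() + "\n");

	/* Compile the resulting fragment shader. */
	if (!fragmentShader_->compileSourceCode(src))
		return false;
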
src/qcam/assets/shader/YUV_3_planes.frag

@@ -14,20 +14,23 @@ uniform sampler2D tex_y;
 uniform sampler2D tex_u;
 uniform sampler2D tex_v;
 
+const mat3 yuv2rgb_matrix = mat3(
+	YUV2RGB_MATRIX
+);
+const vec3 yuv2rgb_offset = vec3(
+	YUV2RGB_Y_OFFSET / 255.0, 128.0 / 255.0, 128.0 / 255.0
+);
+
 void main(void)
 {
 	vec3 yuv;
-	vec3 rgb;
-	mat3 yuv2rgb_bt601_mat = mat3(
-		vec3(1.164, 1.164, 1.164),
-		vec3(0.000, -0.392, 2.017),
-		vec3(1.596, -0.813, 0.000)
-	);
 
-	yuv.x = texture2D(tex_y, textureOut).r - 0.063;
-	yuv.y = texture2D(tex_u, textureOut).r - 0.500;
-	yuv.z = texture2D(tex_v, textureOut).r - 0.500;
+	yuv.x = texture2D(tex_y, textureOut).r;
+	yuv.y = texture2D(tex_u, textureOut).r;
+	yuv.z = texture2D(tex_v, textureOut).r;
 
-	rgb = yuv2rgb_bt601_mat * yuv;
+	vec3 rgb = yuv2rgb_matrix * (yuv - yuv2rgb_offset);
 	gl_FragColor = vec4(rgb, 1.0);
 }

src/qcam/assets/shader/YUV_packed.frag

@@ -14,15 +14,16 @@ varying vec2 textureOut;
 uniform sampler2D tex_y;
 uniform vec2 tex_step;
 
+const mat3 yuv2rgb_matrix = mat3(
+	YUV2RGB_MATRIX
+);
+const vec3 yuv2rgb_offset = vec3(
+	YUV2RGB_Y_OFFSET / 255.0, 128.0 / 255.0, 128.0 / 255.0
+);
+
 void main(void)
 {
-	mat3 yuv2rgb_bt601_mat = mat3(
-		vec3(1.164, 1.164, 1.164),
-		vec3(0.000, -0.392, 2.017),
-		vec3(1.596, -0.813, 0.000)
-	);
-	vec3 yuv2rgb_bt601_offset = vec3(0.063, 0.500, 0.500);
-
 	/*
 	 * The sampler won't interpolate the texture correctly along the X axis,
 	 * as each RGBA pixel effectively stores two pixels. We thus need to
@@ -76,7 +77,7 @@ void main(void)
 
 	float y = mix(y_left, y_right, step(0.5, f_x));
 
-	vec3 rgb = yuv2rgb_bt601_mat * (vec3(y, uv) - yuv2rgb_bt601_offset);
+	vec3 rgb = yuv2rgb_matrix * (vec3(y, uv) - yuv2rgb_offset);
 	gl_FragColor = vec4(rgb, 1.0);
 }

src/qcam/viewfinder_gl.cpp

@@ -7,9 +7,12 @@
 #include "viewfinder_gl.h"
 
+#include <array>
+
 #include <QByteArray>
 #include <QFile>
 #include <QImage>
+#include <QStringList>
 
 #include <libcamera/formats.h>
@@ -56,7 +59,8 @@ static const QList<libcamera::PixelFormat> supportedFormats{
 };
 
 ViewFinderGL::ViewFinderGL(QWidget *parent)
-	: QOpenGLWidget(parent), buffer_(nullptr), image_(nullptr),
+	: QOpenGLWidget(parent), buffer_(nullptr),
+	  colorSpace_(libcamera::ColorSpace::Raw), image_(nullptr),
 	  vertexBuffer_(QOpenGLBuffer::VertexBuffer)
 {
 }
@@ -72,10 +76,10 @@ const QList<libcamera::PixelFormat> &ViewFinderGL::nativeFormats() const
 }
 
 int ViewFinderGL::setFormat(const libcamera::PixelFormat &format, const QSize &size,
-			    [[maybe_unused]] const libcamera::ColorSpace &colorSpace,
+			    const libcamera::ColorSpace &colorSpace,
 			    unsigned int stride)
 {
-	if (format != format_) {
+	if (format != format_ || colorSpace != colorSpace_) {
 		/*
 		 * If the fragment already exists, remove it and create a new
 		 * one for the new format.
@@ -89,7 +93,10 @@ int ViewFinderGL::setFormat(const libcamera::PixelFormat &format, const QSize &s
 		if (!selectFormat(format))
 			return -1;
 
+		selectColorSpace(colorSpace);
+
 		format_ = format;
+		colorSpace_ = colorSpace;
 	}
 
 	size_ = size;
@@ -318,6 +325,72 @@ bool ViewFinderGL::selectFormat(const libcamera::PixelFormat &format)
 	return ret;
 }
 
+void ViewFinderGL::selectColorSpace(const libcamera::ColorSpace &colorSpace)
+{
+	std::array<double, 9> yuv2rgb;
+
+	/* OpenGL stores arrays in column-major order. */
+	switch (colorSpace.ycbcrEncoding) {
+	case libcamera::ColorSpace::YcbcrEncoding::None:
+		yuv2rgb = {
+			1.0000, 0.0000, 0.0000,
+			0.0000, 1.0000, 0.0000,
+			0.0000, 0.0000, 1.0000,
+		};
+		break;
+
+	case libcamera::ColorSpace::YcbcrEncoding::Rec601:
+		yuv2rgb = {
+			1.0000, 1.0000, 1.0000,
+			0.0000, -0.3441, 1.7720,
+			1.4020, -0.7141, 0.0000,
+		};
+		break;
+
+	case libcamera::ColorSpace::YcbcrEncoding::Rec709:
+		yuv2rgb = {
+			1.0000, 1.0000, 1.0000,
+			0.0000, -0.1873, 1.8856,
+			1.5748, -0.4681, 0.0000,
+		};
+		break;
+
+	case libcamera::ColorSpace::YcbcrEncoding::Rec2020:
+		yuv2rgb = {
+			1.0000, 1.0000, 1.0000,
+			0.0000, -0.1646, 1.8814,
+			1.4746, -0.5714, 0.0000,
+		};
+		break;
+	}
+
+	double offset;
+
+	switch (colorSpace.range) {
+	case libcamera::ColorSpace::Range::Full:
+		offset = 0.0;
+		break;
+
+	case libcamera::ColorSpace::Range::Limited:
+		offset = 16.0;
+
+		for (unsigned int i = 0; i < 3; ++i)
+			yuv2rgb[i] *= 255.0 / 219.0;
+		for (unsigned int i = 4; i < 9; ++i)
+			yuv2rgb[i] *= 255.0 / 224.0;
+		break;
+	}
+
+	QStringList matrix;
+
+	for (double coeff : yuv2rgb)
+		matrix.append(QString::number(coeff, 'f'));
+
+	fragmentShaderDefines_.append("#define YUV2RGB_MATRIX " + matrix.join(", "));
+	fragmentShaderDefines_.append(QString("#define YUV2RGB_Y_OFFSET %1")
+				      .arg(offset, 0, 'f', 1));
+}
+
 bool ViewFinderGL::createVertexShader()
 {
 	/* Create Vertex Shader */

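For illustration, a colour space using the Rec601 Y'CbCr encoding with the
limited quantization range would make selectColorSpace() above generate
define strings roughly like the following (coefficients rounded to the six
decimals produced by QString::number(); a worked example, not captured
output):

	#define YUV2RGB_MATRIX 1.164384, 1.164384, 1.164384, 0.000000, -0.391721, 2.017232, 1.596027, -0.812926, 0.000000
	#define YUV2RGB_Y_OFFSET 16.0

These values match the BT.601 limited-range constants previously hard-coded
in the shaders (1.164, -0.392, 2.017, 1.596, -0.813, and a 16/255 luma
offset).
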
src/qcam/viewfinder_gl.h

@@ -57,6 +57,7 @@ protected:
 private:
 	bool selectFormat(const libcamera::PixelFormat &format);
+	void selectColorSpace(const libcamera::ColorSpace &colorSpace);
 	void configureTexture(QOpenGLTexture &texture);
 
 	bool createFragmentShader();
@@ -67,6 +68,7 @@ private:
 	/* Captured image size, format and buffer */
 	libcamera::FrameBuffer *buffer_;
 	libcamera::PixelFormat format_;
+	libcamera::ColorSpace colorSpace_;
 	QSize size_;
 	unsigned int stride_;
 	Image *image_;
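
With the [[maybe_unused]] attribute dropped, callers of setFormat() are
expected to pass the negotiated colour space along with the pixel format. A
hypothetical call site (not part of this diff; viewfinder_, config and cfg
are placeholder names) might look like:

	const libcamera::StreamConfiguration &cfg = config->at(0);

	int ret = viewfinder_->setFormat(cfg.pixelFormat,
					 QSize(cfg.size.width, cfg.size.height),
					 cfg.colorSpace.value_or(libcamera::ColorSpace::Raw),
					 cfg.stride);
	if (ret < 0)
		return ret;

Because colorSpace_ is initialized to ColorSpace::Raw in the constructor, a
first call that carries a different colour space takes the shader
recompilation path even if the pixel format happens to be unchanged.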