Run clang-format on GLSL

PiperOrigin-RevId: 534015933
(cherry picked from commit 2cbc2c6176)
This commit is contained in:
andrewlewis 2023-05-22 11:47:16 +01:00 committed by Tofunmi Adigun-Hameed
parent 5c42c25ad3
commit aa4f84d89a
11 changed files with 206 additions and 231 deletions

View file

@ -34,44 +34,41 @@ varying vec2 vTexSamplingCoord;
const float epsilon = 1e-10;
vec3 rgbToHcv(vec3 rgb) {
vec4 p = (rgb.g < rgb.b)
? vec4(rgb.bg, -1.0, 2.0 / 3.0)
: vec4(rgb.gb, 0.0, -1.0 / 3.0);
vec4 q = (rgb.r < p.x)
? vec4(p.xyw, rgb.r)
: vec4(rgb.r, p.yzx);
float c = q.x - min(q.w, q.y);
float h = abs((q.w - q.y) / (6.0 * c + epsilon) + q.z);
return vec3(h, c, q.x);
vec4 p = (rgb.g < rgb.b) ? vec4(rgb.bg, -1.0, 2.0 / 3.0)
: vec4(rgb.gb, 0.0, -1.0 / 3.0);
vec4 q = (rgb.r < p.x) ? vec4(p.xyw, rgb.r) : vec4(rgb.r, p.yzx);
float c = q.x - min(q.w, q.y);
float h = abs((q.w - q.y) / (6.0 * c + epsilon) + q.z);
return vec3(h, c, q.x);
}
vec3 rgbToHsl(vec3 rgb) {
vec3 hcv = rgbToHcv(rgb);
float l = hcv.z - hcv.y * 0.5;
float s = hcv.y / (1.0 - abs(l * 2.0 - 1.0) + epsilon);
return vec3(hcv.x, s, l);
vec3 hcv = rgbToHcv(rgb);
float l = hcv.z - hcv.y * 0.5;
float s = hcv.y / (1.0 - abs(l * 2.0 - 1.0) + epsilon);
return vec3(hcv.x, s, l);
}
vec3 hueToRgb(float hue) {
float r = abs(hue * 6.0 - 3.0) - 1.0;
float g = 2.0 - abs(hue * 6.0 - 2.0);
float b = 2.0 - abs(hue * 6.0 - 4.0);
return clamp(vec3(r, g, b), 0.0, 1.0);
float r = abs(hue * 6.0 - 3.0) - 1.0;
float g = 2.0 - abs(hue * 6.0 - 2.0);
float b = 2.0 - abs(hue * 6.0 - 4.0);
return clamp(vec3(r, g, b), 0.0, 1.0);
}
vec3 hslToRgb(vec3 hsl) {
vec3 rgb = hueToRgb(hsl.x);
float c = (1.0 - abs(2.0 * hsl.z - 1.0)) * hsl.y;
return (rgb - 0.5) * c + hsl.z;
vec3 rgb = hueToRgb(hsl.x);
float c = (1.0 - abs(2.0 * hsl.z - 1.0)) * hsl.y;
return (rgb - 0.5) * c + hsl.z;
}
void main() {
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
vec3 hslColor = rgbToHsl(inputColor.rgb);
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
vec3 hslColor = rgbToHsl(inputColor.rgb);
hslColor.x = mod(hslColor.x + uHueAdjustmentDegrees, 1.0);
hslColor.y = clamp(hslColor.y + uSaturationAdjustment, 0.0, 1.0);
hslColor.z = clamp(hslColor.z + uLightnessAdjustment, 0.0, 1.0);
hslColor.x = mod(hslColor.x + uHueAdjustmentDegrees, 1.0);
hslColor.y = clamp(hslColor.y + uSaturationAdjustment, 0.0, 1.0);
hslColor.z = clamp(hslColor.z + uLightnessAdjustment, 0.0, 1.0);
gl_FragColor = vec4(hslToRgb(hslColor), inputColor.a);
gl_FragColor = vec4(hslToRgb(hslColor), inputColor.a);
}

View file

@ -31,69 +31,67 @@ varying vec2 vTexSamplingCoord;
// Applies the color lookup using uLut based on the input colors.
vec3 applyLookup(vec3 color) {
// Reminder: Inside OpenGL vector.xyz is the same as vector.rgb.
// Here we use the x and y coordinates to refer to
// the position to sample from inside the 2D LUT plane and
// rgb to create the 3D coordinates based on the input colors.
// Reminder: Inside OpenGL vector.xyz is the same as vector.rgb.
// Here we use the x and y coordinates to refer to
// the position to sample from inside the 2D LUT plane and
// rgb to create the 3D coordinates based on the input colors.
// To sample from the 3D LUT we interpolate bilinearly twice in the 2D LUT
// to replicate the trilinear interpolation in a 3D LUT. Thus we sample
// from the plane of position redCoordLow and on the plane above.
// redCoordLow points to the lower plane to sample from.
float redCoord = color.r * (uColorLutLength - 1.0);
// Clamping to uColorLutLength - 2 is only needed if redCoord points to the
// most upper plane. In this case there would not be any plane above
// available to sample from.
float redCoordLow = clamp(floor(redCoord), 0.0, uColorLutLength - 2.0);
// To sample from the 3D LUT we interpolate bilinearly twice in the 2D LUT
// to replicate the trilinear interpolation in a 3D LUT. Thus we sample
// from the plane of position redCoordLow and on the plane above.
// redCoordLow points to the lower plane to sample from.
float redCoord = color.r * (uColorLutLength - 1.0);
// Clamping to uColorLutLength - 2 is only needed if redCoord points to the
// most upper plane. In this case there would not be any plane above
// available to sample from.
float redCoordLow = clamp(floor(redCoord), 0.0, uColorLutLength - 2.0);
// lowerY is indexed in two steps. First redCoordLow defines the plane to
// sample from. Next the green color component is added to index the row in
// the found plane. As described in the NVIDIA blog article about LUTs
// https://developer.nvidia.com/gpugems/gpugems2/part-iii-high-quality-rendering/chapter-24-using-lookup-tables-accelerate-color
// (Section 24.2), we sample from color * scale + offset, where offset is
// defined by 1 / (2 * uColorLutLength) and the scale is defined by
// (uColorLutLength - 1.0) / uColorLutLength.
// lowerY is indexed in two steps. First redCoordLow defines the plane to
// sample from. Next the green color component is added to index the row in
// the found plane. As described in the NVIDIA blog article about LUTs
// https://developer.nvidia.com/gpugems/gpugems2/part-iii-high-quality-rendering/chapter-24-using-lookup-tables-accelerate-color
// (Section 24.2), we sample from color * scale + offset, where offset is
// defined by 1 / (2 * uColorLutLength) and the scale is defined by
// (uColorLutLength - 1.0) / uColorLutLength.
// The following derives the equation of lowerY. For this let
// N = uColorLutLength. The general formula to sample at row y
// is defined as y = N * r + g.
// Using the offset and scale as described in NVIDIA's blog article we get:
// y = offset + (N * r + g) * scale
// y = 1 / (2 * N) + (N * r + g) * (N - 1) / N
// y = 1 / (2 * N) + N * r * (N - 1) / N + g * (N - 1) / N
// We have defined redCoord as r * (N - 1) if we excluded the clamping for
// now, giving us:
// y = 1 / (2 * N) + N * redCoord / N + g * (N - 1) / N
// This simplifies to:
// y = 0.5 / N + (N * redCoord + g * (N - 1)) / N
// y = (0.5 + N * redCoord + g * (N - 1)) / N
// This formula now assumes a coordinate system in the range of [0, N] but
// OpenGL uses a [0, 1] unit coordinate system internally. Thus dividing
// by N gives us the final formula for y:
// y = ((0.5 + N * redCoord + g * (N - 1)) / N) / N
// y = (0.5 + redCoord * N + g * (N - 1)) / (N * N)
float lowerY =
(0.5
+ redCoordLow * uColorLutLength
+ color.g * (uColorLutLength - 1.0))
/ (uColorLutLength * uColorLutLength);
// The upperY is the same position moved up by one LUT plane.
float upperY = lowerY + 1.0 / uColorLutLength;
// The following derives the equation of lowerY. For this let
// N = uColorLutLength. The general formula to sample at row y
// is defined as y = N * r + g.
// Using the offset and scale as described in NVIDIA's blog article we get:
// y = offset + (N * r + g) * scale
// y = 1 / (2 * N) + (N * r + g) * (N - 1) / N
// y = 1 / (2 * N) + N * r * (N - 1) / N + g * (N - 1) / N
// We have defined redCoord as r * (N - 1) if we excluded the clamping for
// now, giving us:
// y = 1 / (2 * N) + N * redCoord / N + g * (N - 1) / N
// This simplifies to:
// y = 0.5 / N + (N * redCoord + g * (N - 1)) / N
// y = (0.5 + N * redCoord + g * (N - 1)) / N
// This formula now assumes a coordinate system in the range of [0, N] but
// OpenGL uses a [0, 1] unit coordinate system internally. Thus dividing
// by N gives us the final formula for y:
// y = ((0.5 + N * redCoord + g * (N - 1)) / N) / N
// y = (0.5 + redCoord * N + g * (N - 1)) / (N * N)
float lowerY = (0.5 + redCoordLow * uColorLutLength +
color.g * (uColorLutLength - 1.0)) /
(uColorLutLength * uColorLutLength);
// The upperY is the same position moved up by one LUT plane.
float upperY = lowerY + 1.0 / uColorLutLength;
// The x position is the blue color channel (x-axis in LUT[R][G][B]).
float x = (0.5 + color.b * (uColorLutLength - 1.0)) / uColorLutLength;
// The x position is the blue color channel (x-axis in LUT[R][G][B]).
float x = (0.5 + color.b * (uColorLutLength - 1.0)) / uColorLutLength;
vec3 lowerRgb = texture2D(uColorLut, vec2(x, lowerY)).rgb;
vec3 upperRgb = texture2D(uColorLut, vec2(x, upperY)).rgb;
vec3 lowerRgb = texture2D(uColorLut, vec2(x, lowerY)).rgb;
vec3 upperRgb = texture2D(uColorLut, vec2(x, upperY)).rgb;
// Linearly interpolate between lowerRgb and upperRgb based on the distance
// between the actual position in the plane and the lower sampling position.
return mix(lowerRgb, upperRgb, redCoord - redCoordLow);
// Linearly interpolate between lowerRgb and upperRgb based on the distance
// between the actual position in the plane and the lower sampling position.
return mix(lowerRgb, upperRgb, redCoord - redCoordLow);
}
void main() {
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
gl_FragColor.rgb = applyLookup(inputColor.rgb);
gl_FragColor.a = inputColor.a;
gl_FragColor.rgb = applyLookup(inputColor.rgb);
gl_FragColor.a = inputColor.a;
}

View file

@ -44,17 +44,15 @@ highp float hlgOetfSingleChannel(highp float linearChannel) {
const highp float b = 0.28466892;
const highp float c = 0.55991073;
return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel) :
a * log(12.0 * linearChannel - b) + c;
return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel)
: a * log(12.0 * linearChannel - b) + c;
}
// BT.2100 / BT.2020 HLG OETF.
highp vec3 hlgOetf(highp vec3 linearColor) {
return vec3(
hlgOetfSingleChannel(linearColor.r),
hlgOetfSingleChannel(linearColor.g),
hlgOetfSingleChannel(linearColor.b)
);
return vec3(hlgOetfSingleChannel(linearColor.r),
hlgOetfSingleChannel(linearColor.g),
hlgOetfSingleChannel(linearColor.b));
}
// BT.2100 / BT.2020, PQ / ST2084 OETF.

View file

@ -23,7 +23,7 @@ uniform mat4 uRgbMatrix;
varying vec2 vTexSamplingCoord;
void main() {
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
gl_FragColor = uRgbMatrix * vec4(inputColor.rgb, 1);
gl_FragColor.a = inputColor.a;
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
gl_FragColor = uRgbMatrix * vec4(inputColor.rgb, 1);
gl_FragColor.a = inputColor.a;
}

View file

@ -66,17 +66,15 @@ highp float hlgEotfSingleChannel(highp float hlgChannel) {
const highp float a = 0.17883277;
const highp float b = 0.28466892;
const highp float c = 0.55991073;
return hlgChannel <= 0.5 ? hlgChannel * hlgChannel / 3.0 :
(b + exp((hlgChannel - c) / a)) / 12.0;
return hlgChannel <= 0.5 ? hlgChannel * hlgChannel / 3.0
: (b + exp((hlgChannel - c) / a)) / 12.0;
}
// BT.2100 / BT.2020 HLG EOTF.
highp vec3 hlgEotf(highp vec3 hlgColor) {
return vec3(
hlgEotfSingleChannel(hlgColor.r),
hlgEotfSingleChannel(hlgColor.g),
hlgEotfSingleChannel(hlgColor.b)
);
return vec3(hlgEotfSingleChannel(hlgColor.r),
hlgEotfSingleChannel(hlgColor.g),
hlgEotfSingleChannel(hlgColor.b));
}
// BT.2100 / BT.2020 PQ EOTF.
@ -115,18 +113,17 @@ highp vec3 applyHlgBt2020ToBt709Ootf(highp vec3 linearRgbBt2020) {
// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2100-2-201807-I!!PDF-E.pdf
// Matrix values based on computeXYZMatrix(BT2020Primaries, BT2020WhitePoint)
// https://cs.android.com/android/platform/superproject/+/master:frameworks/base/libs/hwui/utils/HostColorSpace.cpp;l=200-232;drc=86bd214059cd6150304888a285941bf74af5b687
const mat3 RGB_TO_XYZ_BT2020 = mat3(
0.63695805f, 0.26270021f, 0.00000000f,
0.14461690f, 0.67799807f, 0.02807269f,
0.16888098f, 0.05930172f, 1.06098506f);
const mat3 RGB_TO_XYZ_BT2020 =
mat3(0.63695805f, 0.26270021f, 0.00000000f, 0.14461690f, 0.67799807f,
0.02807269f, 0.16888098f, 0.05930172f, 1.06098506f);
// Matrix values based on computeXYZMatrix(BT709Primaries, BT709WhitePoint)
const mat3 XYZ_TO_RGB_BT709 = mat3(
3.24096994f, -0.96924364f, 0.05563008f,
-1.53738318f, 1.87596750f, -0.20397696f,
-0.49861076f, 0.04155506f, 1.05697151f);
const mat3 XYZ_TO_RGB_BT709 =
mat3(3.24096994f, -0.96924364f, 0.05563008f, -1.53738318f, 1.87596750f,
-0.20397696f, -0.49861076f, 0.04155506f, 1.05697151f);
// hlgGamma is 1.2 + 0.42 * log10(nominalPeakLuminance/1000);
// nominalPeakLuminance was selected to use 500 as a typical value, used
// in https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/tonemap/tonemap.cpp;drc=7a577450e536aa1e99f229a0cb3d3531c82e8a8d;l=62,
// in
// https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/tonemap/tonemap.cpp;drc=7a577450e536aa1e99f229a0cb3d3531c82e8a8d;l=62,
// b/199162498#comment35, and
// https://www.microsoft.com/applied-sciences/uploads/projects/investigation-of-hdr-vs-tone-mapped-sdr/investigation-of-hdr-vs-tone-mapped-sdr.pdf.
const float hlgGamma = 1.0735674018211279;
@ -167,17 +164,15 @@ highp float hlgOetfSingleChannel(highp float linearChannel) {
const highp float b = 0.28466892;
const highp float c = 0.55991073;
return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel) :
a * log(12.0 * linearChannel - b) + c;
return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel)
: a * log(12.0 * linearChannel - b) + c;
}
// BT.2100 / BT.2020 HLG OETF.
highp vec3 hlgOetf(highp vec3 linearColor) {
return vec3(
hlgOetfSingleChannel(linearColor.r),
hlgOetfSingleChannel(linearColor.g),
hlgOetfSingleChannel(linearColor.b)
);
return vec3(hlgOetfSingleChannel(linearColor.r),
hlgOetfSingleChannel(linearColor.g),
hlgOetfSingleChannel(linearColor.b));
}
// BT.2100 / BT.2020, PQ / ST2084 OETF.
@ -199,17 +194,16 @@ highp vec3 pqOetf(highp vec3 linearColor) {
// BT.709 gamma 2.2 OETF for one channel.
float gamma22OetfSingleChannel(highp float linearChannel) {
// Reference:
// https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2
return pow(linearChannel, (1.0 / 2.2));
// Reference:
// https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2
return pow(linearChannel, (1.0 / 2.2));
}
// BT.709 gamma 2.2 OETF.
vec3 gamma22Oetf(highp vec3 linearColor) {
return vec3(
gamma22OetfSingleChannel(linearColor.r),
gamma22OetfSingleChannel(linearColor.g),
gamma22OetfSingleChannel(linearColor.b));
return vec3(gamma22OetfSingleChannel(linearColor.r),
gamma22OetfSingleChannel(linearColor.g),
gamma22OetfSingleChannel(linearColor.b));
}
// Applies the appropriate OETF to convert linear optical signals to nonlinear
@ -237,9 +231,10 @@ vec3 yuvToRgb(vec3 yuv) {
void main() {
vec3 srcYuv = texture(uTexSampler, vTexSamplingCoord).xyz;
vec3 opticalColorBt2020 = applyEotf(yuvToRgb(srcYuv));
vec4 opticalColor = (uApplyHdrToSdrToneMapping == 1)
? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0)
: vec4(opticalColorBt2020, 1.0);
vec4 opticalColor =
(uApplyHdrToSdrToneMapping == 1)
? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0)
: vec4(opticalColorBt2020, 1.0);
vec4 transformedColors = uRgbMatrix * opticalColor;
outColor = vec4(applyOetf(transformedColors.rgb), 1.0);
}

View file

@ -58,17 +58,15 @@ highp float hlgEotfSingleChannel(highp float hlgChannel) {
const highp float a = 0.17883277;
const highp float b = 0.28466892;
const highp float c = 0.55991073;
return hlgChannel <= 0.5 ? hlgChannel * hlgChannel / 3.0 :
(b + exp((hlgChannel - c) / a)) / 12.0;
return hlgChannel <= 0.5 ? hlgChannel * hlgChannel / 3.0
: (b + exp((hlgChannel - c) / a)) / 12.0;
}
// BT.2100 / BT.2020 HLG EOTF.
highp vec3 hlgEotf(highp vec3 hlgColor) {
return vec3(
hlgEotfSingleChannel(hlgColor.r),
hlgEotfSingleChannel(hlgColor.g),
hlgEotfSingleChannel(hlgColor.b)
);
return vec3(hlgEotfSingleChannel(hlgColor.r),
hlgEotfSingleChannel(hlgColor.g),
hlgEotfSingleChannel(hlgColor.b));
}
// BT.2100 / BT.2020 PQ EOTF.
@ -107,18 +105,17 @@ highp vec3 applyHlgBt2020ToBt709Ootf(highp vec3 linearRgbBt2020) {
// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2100-2-201807-I!!PDF-E.pdf
// Matrix values based on computeXYZMatrix(BT2020Primaries, BT2020WhitePoint)
// https://cs.android.com/android/platform/superproject/+/master:frameworks/base/libs/hwui/utils/HostColorSpace.cpp;l=200-232;drc=86bd214059cd6150304888a285941bf74af5b687
const mat3 RGB_TO_XYZ_BT2020 = mat3(
0.63695805f, 0.26270021f, 0.00000000f,
0.14461690f, 0.67799807f, 0.02807269f,
0.16888098f, 0.05930172f, 1.06098506f);
const mat3 RGB_TO_XYZ_BT2020 =
mat3(0.63695805f, 0.26270021f, 0.00000000f, 0.14461690f, 0.67799807f,
0.02807269f, 0.16888098f, 0.05930172f, 1.06098506f);
// Matrix values based on computeXYZMatrix(BT709Primaries, BT709WhitePoint)
const mat3 XYZ_TO_RGB_BT709 = mat3(
3.24096994f, -0.96924364f, 0.05563008f,
-1.53738318f, 1.87596750f, -0.20397696f,
-0.49861076f, 0.04155506f, 1.05697151f);
const mat3 XYZ_TO_RGB_BT709 =
mat3(3.24096994f, -0.96924364f, 0.05563008f, -1.53738318f, 1.87596750f,
-0.20397696f, -0.49861076f, 0.04155506f, 1.05697151f);
// hlgGamma is 1.2 + 0.42 * log10(nominalPeakLuminance/1000);
// nominalPeakLuminance was selected to use 500 as a typical value, used
// in https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/tonemap/tonemap.cpp;drc=7a577450e536aa1e99f229a0cb3d3531c82e8a8d;l=62,
// in
// https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/tonemap/tonemap.cpp;drc=7a577450e536aa1e99f229a0cb3d3531c82e8a8d;l=62,
// b/199162498#comment35, and
// https://www.microsoft.com/applied-sciences/uploads/projects/investigation-of-hdr-vs-tone-mapped-sdr/investigation-of-hdr-vs-tone-mapped-sdr.pdf.
const float hlgGamma = 1.0735674018211279;
@ -159,17 +156,15 @@ highp float hlgOetfSingleChannel(highp float linearChannel) {
const highp float b = 0.28466892;
const highp float c = 0.55991073;
return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel) :
a * log(12.0 * linearChannel - b) + c;
return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel)
: a * log(12.0 * linearChannel - b) + c;
}
// BT.2100 / BT.2020 HLG OETF.
highp vec3 hlgOetf(highp vec3 linearColor) {
return vec3(
hlgOetfSingleChannel(linearColor.r),
hlgOetfSingleChannel(linearColor.g),
hlgOetfSingleChannel(linearColor.b)
);
return vec3(hlgOetfSingleChannel(linearColor.r),
hlgOetfSingleChannel(linearColor.g),
hlgOetfSingleChannel(linearColor.b));
}
// BT.2100 / BT.2020, PQ / ST2084 OETF.
@ -191,17 +186,16 @@ highp vec3 pqOetf(highp vec3 linearColor) {
// BT.709 gamma 2.2 OETF for one channel.
float gamma22OetfSingleChannel(highp float linearChannel) {
// Reference:
// https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2
return pow(linearChannel, (1.0 / 2.2));
// Reference:
// https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2
return pow(linearChannel, (1.0 / 2.2));
}
// BT.709 gamma 2.2 OETF.
vec3 gamma22Oetf(highp vec3 linearColor) {
return vec3(
gamma22OetfSingleChannel(linearColor.r),
gamma22OetfSingleChannel(linearColor.g),
gamma22OetfSingleChannel(linearColor.b));
return vec3(gamma22OetfSingleChannel(linearColor.r),
gamma22OetfSingleChannel(linearColor.g),
gamma22OetfSingleChannel(linearColor.b));
}
// Applies the appropriate OETF to convert linear optical signals to nonlinear
@ -222,11 +216,12 @@ highp vec3 applyOetf(highp vec3 linearColor) {
}
void main() {
vec3 opticalColorBt2020 = applyEotf(
texture(uTexSampler, vTexSamplingCoord).xyz);
vec4 opticalColor = (uApplyHdrToSdrToneMapping == 1)
? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0)
: vec4(opticalColorBt2020, 1.0);
vec3 opticalColorBt2020 =
applyEotf(texture(uTexSampler, vTexSamplingCoord).xyz);
vec4 opticalColor =
(uApplyHdrToSdrToneMapping == 1)
? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0)
: vec4(opticalColorBt2020, 1.0);
vec4 transformedColors = uRgbMatrix * opticalColor;
outColor = vec4(applyOetf(transformedColors.rgb), 1.0);
}

View file

@ -13,7 +13,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
// ES 2 fragment shader that:
// 1. Samples from an external texture with uTexSampler copying from this
// texture to the current output.
@ -39,22 +38,21 @@ const float gamma = 1.0 / inverseGamma;
const int GL_FALSE = 0;
const int GL_TRUE = 1;
// Transforms a single channel from electrical to optical SDR using the SMPTE
// Transforms a single channel from electrical to optical SDR using the SMPTE
// 170M OETF.
float smpte170mEotfSingleChannel(float electricalChannel) {
// Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return electricalChannel < 0.0812
? electricalChannel / 4.500
: pow((electricalChannel + 0.099) / 1.099, gamma);
? electricalChannel / 4.500
: pow((electricalChannel + 0.099) / 1.099, gamma);
}
// Transforms electrical to optical SDR using the SMPTE 170M EOTF.
vec3 smpte170mEotf(vec3 electricalColor) {
return vec3(
smpte170mEotfSingleChannel(electricalColor.r),
smpte170mEotfSingleChannel(electricalColor.g),
smpte170mEotfSingleChannel(electricalColor.b));
return vec3(smpte170mEotfSingleChannel(electricalColor.r),
smpte170mEotfSingleChannel(electricalColor.g),
smpte170mEotfSingleChannel(electricalColor.b));
}
// Transforms a single channel from optical to electrical SDR.
@ -62,16 +60,15 @@ float smpte170mOetfSingleChannel(float opticalChannel) {
// Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return opticalChannel < 0.018
? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
}
// Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF.
vec3 smpte170mOetf(vec3 opticalColor) {
return vec3(
smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.b));
return vec3(smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.b));
}
// Applies the appropriate OETF to convert linear optical signals to nonlinear
@ -80,8 +77,8 @@ highp vec3 applyOetf(highp vec3 linearColor) {
// LINT.IfChange(color_transfer)
const int COLOR_TRANSFER_LINEAR = 1;
const int COLOR_TRANSFER_SDR_VIDEO = 3;
if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR
|| uEnableColorTransfer == GL_FALSE) {
if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR ||
uEnableColorTransfer == GL_FALSE) {
return linearColor;
} else if (uOutputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) {
return smpte170mOetf(linearColor);
@ -91,8 +88,8 @@ highp vec3 applyOetf(highp vec3 linearColor) {
}
}
vec3 applyEotf(vec3 electricalColor){
if (uEnableColorTransfer == GL_TRUE){
vec3 applyEotf(vec3 electricalColor) {
if (uEnableColorTransfer == GL_TRUE) {
return smpte170mEotf(electricalColor);
} else if (uEnableColorTransfer == GL_FALSE) {
return electricalColor;

View file

@ -13,7 +13,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
// ES 2 fragment shader that:
// 1. Samples from an input texture created from an internal texture (e.g. a
// texture created from a bitmap), with uTexSampler copying from this texture
@ -50,17 +49,15 @@ float srgbEotfSingleChannel(float electricalChannel) {
// Specification:
// https://developer.android.com/ndk/reference/group/a-data-space#group___a_data_space_1gga2759ad19cae46646cc5f7002758c4a1cac1bef6aa3a72abbf4a651a0bfb117f96
return electricalChannel <= 0.04045
? electricalChannel / 12.92
: pow((electricalChannel + 0.055) / 1.055, 2.4);
? electricalChannel / 12.92
: pow((electricalChannel + 0.055) / 1.055, 2.4);
}
// Transforms electrical to optical SDR using the sRGB EOTF.
vec3 srgbEotf(const vec3 electricalColor) {
return vec3(
srgbEotfSingleChannel(electricalColor.r),
srgbEotfSingleChannel(electricalColor.g),
srgbEotfSingleChannel(electricalColor.b)
);
return vec3(srgbEotfSingleChannel(electricalColor.r),
srgbEotfSingleChannel(electricalColor.g),
srgbEotfSingleChannel(electricalColor.b));
}
// Transforms a single channel from electrical to optical SDR using the SMPTE
@ -69,16 +66,15 @@ float smpte170mEotfSingleChannel(float electricalChannel) {
// Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return electricalChannel < 0.0812
? electricalChannel / 4.500
: pow((electricalChannel + 0.099) / 1.099, gamma);
? electricalChannel / 4.500
: pow((electricalChannel + 0.099) / 1.099, gamma);
}
// Transforms electrical to optical SDR using the SMPTE 170M EOTF.
vec3 smpte170mEotf(vec3 electricalColor) {
return vec3(
smpte170mEotfSingleChannel(electricalColor.r),
smpte170mEotfSingleChannel(electricalColor.g),
smpte170mEotfSingleChannel(electricalColor.b));
return vec3(smpte170mEotfSingleChannel(electricalColor.r),
smpte170mEotfSingleChannel(electricalColor.g),
smpte170mEotfSingleChannel(electricalColor.b));
}
// Transforms a single channel from optical to electrical SDR.
@ -86,23 +82,22 @@ float smpte170mOetfSingleChannel(float opticalChannel) {
// Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return opticalChannel < 0.018
? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
}
// Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF.
vec3 smpte170mOetf(vec3 opticalColor) {
return vec3(
smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.b));
return vec3(smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.b));
}
// Applies the appropriate EOTF to convert nonlinear electrical signals to linear
// optical signals. Input and output are both normalized to [0, 1].
vec3 applyEotf(vec3 electricalColor){
if (uEnableColorTransfer == GL_TRUE){
if (uInputColorTransfer == COLOR_TRANSFER_SRGB){
return srgbEotf(electricalColor) ;
// Applies the appropriate EOTF to convert nonlinear electrical signals to
// linear optical signals. Input and output are both normalized to [0, 1].
vec3 applyEotf(vec3 electricalColor) {
if (uEnableColorTransfer == GL_TRUE) {
if (uInputColorTransfer == COLOR_TRANSFER_SRGB) {
return srgbEotf(electricalColor);
} else if (uInputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) {
return smpte170mEotf(electricalColor);
} else {
@ -120,8 +115,8 @@ vec3 applyEotf(vec3 electricalColor){
// Applies the appropriate OETF to convert linear optical signals to nonlinear
// electrical signals. Input and output are both normalized to [0, 1].
highp vec3 applyOetf(highp vec3 linearColor) {
if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR
|| uEnableColorTransfer == GL_FALSE) {
if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR ||
uEnableColorTransfer == GL_FALSE) {
return linearColor;
} else if (uOutputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) {
return smpte170mOetf(linearColor);
@ -131,8 +126,8 @@ highp vec3 applyOetf(highp vec3 linearColor) {
}
}
vec2 getAdjustedTexSamplingCoord(vec2 originalTexSamplingCoord){
if (uInputColorTransfer == COLOR_TRANSFER_SRGB){
vec2 getAdjustedTexSamplingCoord(vec2 originalTexSamplingCoord) {
if (uInputColorTransfer == COLOR_TRANSFER_SRGB) {
// Whereas the Android system uses the top-left corner as (0,0) of the
// coordinate system, OpenGL uses the bottom-left corner as (0,0), so the
// texture gets flipped. We flip the texture vertically to ensure the
@ -144,8 +139,8 @@ vec2 getAdjustedTexSamplingCoord(vec2 originalTexSamplingCoord){
}
void main() {
vec4 inputColor = texture2D(
uTexSampler, getAdjustedTexSamplingCoord(vTexSamplingCoord));
vec4 inputColor =
texture2D(uTexSampler, getAdjustedTexSamplingCoord(vTexSamplingCoord));
vec3 linearInputColor = applyEotf(inputColor.rgb);
vec4 transformedColors = uRgbMatrix * vec4(linearInputColor, 1);

View file

@ -30,37 +30,35 @@ uniform int uOutputColorTransfer;
const float inverseGamma = 0.4500;
// Transforms a single channel from optical to electrical SDR using the SMPTE
// Transforms a single channel from optical to electrical SDR using the SMPTE
// 170M OETF.
float smpte170mOetfSingleChannel(float opticalChannel) {
// Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return opticalChannel < 0.018
? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
// Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return opticalChannel < 0.018
? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
}
// Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF.
vec3 smpte170mOetf(vec3 opticalColor) {
return vec3(
smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.b));
return vec3(smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.b));
}
// BT.709 gamma 2.2 OETF for one channel.
float gamma22OetfSingleChannel(highp float linearChannel) {
// Reference:
// https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_gamma22
return pow(linearChannel, (1.0 / 2.2));
// Reference:
// https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_gamma22
return pow(linearChannel, (1.0 / 2.2));
}
// BT.709 gamma 2.2 OETF.
vec3 gamma22Oetf(highp vec3 linearColor) {
return vec3(
gamma22OetfSingleChannel(linearColor.r),
gamma22OetfSingleChannel(linearColor.g),
gamma22OetfSingleChannel(linearColor.b));
return vec3(gamma22OetfSingleChannel(linearColor.r),
gamma22OetfSingleChannel(linearColor.g),
gamma22OetfSingleChannel(linearColor.b));
}
// Applies the appropriate OETF to convert linear optical signals to nonlinear
@ -80,8 +78,8 @@ highp vec3 applyOetf(highp vec3 linearColor) {
}
void main() {
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
vec4 transformedColors = uRgbMatrix * vec4(inputColor.rgb, 1);
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
vec4 transformedColors = uRgbMatrix * vec4(inputColor.rgb, 1);
gl_FragColor = vec4(applyOetf(transformedColors.rgb), inputColor.a);
gl_FragColor = vec4(applyOetf(transformedColors.rgb), inputColor.a);
}

View file

@ -22,6 +22,7 @@ uniform mat4 uTexTransformationMatrix;
varying vec2 vTexSamplingCoord;
void main() {
gl_Position = uTransformationMatrix * aFramePosition;
vec4 texturePosition = vec4(aFramePosition.x * 0.5 + 0.5, aFramePosition.y * 0.5 + 0.5, 0.0, 1.0);
vec4 texturePosition = vec4(aFramePosition.x * 0.5 + 0.5,
aFramePosition.y * 0.5 + 0.5, 0.0, 1.0);
vTexSamplingCoord = (uTexTransformationMatrix * texturePosition).xy;
}

View file

@ -22,6 +22,7 @@ uniform mat4 uTexTransformationMatrix;
out vec2 vTexSamplingCoord;
void main() {
gl_Position = uTransformationMatrix * aFramePosition;
vec4 texturePosition = vec4(aFramePosition.x * 0.5 + 0.5, aFramePosition.y * 0.5 + 0.5, 0.0, 1.0);
vec4 texturePosition = vec4(aFramePosition.x * 0.5 + 0.5,
aFramePosition.y * 0.5 + 0.5, 0.0, 1.0);
vTexSamplingCoord = (uTexTransformationMatrix * texturePosition).xy;
}