Transformer: Allow single-sequence mixing HDR and SDR input.

Previously, input assets had to be all SDR or all HDR.

After this CL, if tone-mapping is requested, HDR and SDR may mix in any order. If tone-mapping is not requested, SDR may precede HDR, but not vice versa, until SDR to HDR tone-mapping is implemented.

Some changes to accomplish this include:
1. Inputting the decoded format's color to VideoFrameProcessor.registerInputStream
for each stream.
2. Calculating the estimated decoded format's color for each stream, based on whether
MediaCodec tone-mapping is applied.

PiperOrigin-RevId: 602747837
This commit is contained in:
huangdarwin 2024-01-30 09:17:52 -08:00 committed by Copybara-Service
parent 7c8a31e2b1
commit 3e59c113d7
19 changed files with 440 additions and 129 deletions

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

View file

@ -0,0 +1,118 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package androidx.media3.transformer;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA;
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static com.google.common.truth.Truth.assertWithMessage;
import android.graphics.Bitmap;
import androidx.annotation.Nullable;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Util;
import androidx.media3.effect.Presentation;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.List;
/** Utility class for testing {@link EditedMediaItemSequence} instances. */
public final class SequenceEffectTestUtil {
// Asset directory holding the expected "golden" bitmaps that exports are compared against.
private static final String PNG_ASSET_BASE_PATH = "media/bitmap/transformer_sequence_effect_test";
/**
* Clipping end position, in milliseconds, intended to retain only the first frame of a 30-fps
* video (a single 30-fps frame lasts ~33ms, so a 50ms clip contains exactly one frame).
*/
public static final long SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS = 50;
// Static utility class; not meant to be instantiated.
private SequenceEffectTestUtil() {}
/**
* Creates a {@link Composition} with the specified {@link Presentation} and {@link
* EditedMediaItem} instances.
*
* @param presentation The {@link Presentation} to apply to the whole {@link Composition}, or
*     {@code null} to apply none.
* @param editedMediaItem The first {@link EditedMediaItem} in the sequence.
* @param editedMediaItems Any subsequent {@link EditedMediaItem} instances, in sequence order.
*/
public static Composition createComposition(
@Nullable Presentation presentation,
EditedMediaItem editedMediaItem,
EditedMediaItem... editedMediaItems) {
Composition.Builder builder =
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem, editedMediaItems));
if (presentation != null) {
builder.setEffects(
new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.of(presentation)));
}
return builder.build();
}
/**
* Creates an {@link EditedMediaItem} with a video at {@code uri} clipped to the {@code
* endPositionMs}, with {@code effects} applied.
*
* <p>This may be used to, for example, clip to only the first frame of a video.
*/
public static EditedMediaItem clippedVideo(String uri, List<Effect> effects, long endPositionMs) {
return new EditedMediaItem.Builder(
MediaItem.fromUri(uri)
.buildUpon()
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setEndPositionMs(endPositionMs)
.build())
.build())
// Audio is removed so that tests only need to reason about video output.
.setRemoveAudio(true)
.setEffects(
new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.copyOf(effects)))
.build();
}
/**
* Creates an {@link EditedMediaItem} with an image at {@code uri}, shown once, with {@code
* effects} applied.
*/
public static EditedMediaItem oneFrameFromImage(String uri, List<Effect> effects) {
return new EditedMediaItem.Builder(MediaItem.fromUri(uri))
// 50ms for a 20-fps video is one frame.
.setFrameRate(20)
.setDurationUs(50_000)
.setEffects(
new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.copyOf(effects)))
.build();
}
/**
* Asserts that each bitmap in {@code actualBitmaps} matches the expected bitmap stored at {@link
* #PNG_ASSET_BASE_PATH}/{@code testId}_i.png, where i is the index of the bitmap within {@code
* actualBitmaps}.
*
* <p>Also saves {@code actualBitmaps}, so that they can be inspected in case they differ from the
* expected bitmaps.
*
* @throws IOException If reading an expected bitmap fails.
*/
public static void assertBitmapsMatchExpectedAndSave(List<Bitmap> actualBitmaps, String testId)
throws IOException {
for (int i = 0; i < actualBitmaps.size(); i++) {
Bitmap actualBitmap = actualBitmaps.get(i);
// Save first, so the actual output is available for inspection even if the assertion below
// fails.
maybeSaveTestBitmap(
testId, /* bitmapLabel= */ String.valueOf(i), actualBitmap, /* path= */ null);
String subTestId = testId + "_" + i;
String expectedPath = Util.formatInvariant("%s/%s.png", PNG_ASSET_BASE_PATH, subTestId);
Bitmap expectedBitmap = readBitmap(expectedPath);
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, subTestId);
assertWithMessage("For expected bitmap " + expectedPath)
.that(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA);
}
}
}

View file

@ -19,9 +19,6 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA;
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.JPG_PORTRAIT_ASSET_URI_STRING;
@ -29,13 +26,15 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
import static androidx.media3.transformer.SequenceEffectTestUtil.createComposition;
import static androidx.media3.transformer.SequenceEffectTestUtil.oneFrameFromImage;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static org.junit.Assume.assumeFalse;
import android.content.Context;
import android.graphics.Bitmap;
import androidx.annotation.Nullable;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Util;
@ -49,19 +48,17 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.base.Ascii;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Tests for using different {@linkplain Effect effects} for {@link MediaItem MediaItems} in one
* {@link EditedMediaItemSequence} .
* {@link EditedMediaItemSequence}.
*/
@RunWith(AndroidJUnit4.class)
public final class TransformerSequenceEffectTest {
private static final ImmutableList<Effect> NO_EFFECT = ImmutableList.of();
private static final String PNG_ASSET_BASE_PATH = "media/bitmap/transformer_sequence_effect_test";
private static final String OVERLAY_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png";
private static final int EXPORT_WIDTH = 360;
private static final int EXPORT_HEIGHT = 240;
@ -82,11 +79,12 @@ public final class TransformerSequenceEffectTest {
Composition composition =
createComposition(
/* presentation= */ null,
oneFrameFromVideo(
clippedVideo(
MP4_ASSET_URI_STRING,
ImmutableList.of(
Presentation.createForWidthAndHeight(
EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT))),
EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT)),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
oneFrameFromImage(
JPG_ASSET_URI_STRING,
ImmutableList.of(
@ -108,7 +106,7 @@ public final class TransformerSequenceEffectTest {
.run(testId, composition);
assertThat(result.filePath).isNotNull();
assertBitmapsMatchExpected(
assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@ -142,14 +140,17 @@ public final class TransformerSequenceEffectTest {
Presentation.createForWidthAndHeight(
EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT))),
oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT),
oneFrameFromVideo(
MP4_ASSET_URI_STRING, ImmutableList.of(RgbFilter.createInvertedFilter())),
oneFrameFromVideo(
clippedVideo(
MP4_ASSET_URI_STRING,
ImmutableList.of(RgbFilter.createInvertedFilter()),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
clippedVideo(
MP4_ASSET_URI_STRING,
ImmutableList.of(
Presentation.createForWidthAndHeight(
EXPORT_WIDTH / 2, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT),
createOverlayEffect())));
createOverlayEffect()),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
@ -157,7 +158,7 @@ public final class TransformerSequenceEffectTest {
.run(testId, composition);
assertThat(result.filePath).isNotNull();
assertBitmapsMatchExpected(
assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@ -175,8 +176,9 @@ public final class TransformerSequenceEffectTest {
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT),
oneFrameFromVideo(MP4_PORTRAIT_ASSET_URI_STRING, NO_EFFECT),
oneFrameFromVideo(MP4_ASSET_URI_STRING, NO_EFFECT),
clippedVideo(
MP4_PORTRAIT_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
clippedVideo(MP4_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
oneFrameFromImage(JPG_PORTRAIT_ASSET_URI_STRING, NO_EFFECT));
ExportTestResult result =
@ -185,7 +187,7 @@ public final class TransformerSequenceEffectTest {
.run(testId, composition);
assertThat(result.filePath).isNotNull();
assertBitmapsMatchExpected(
assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@ -203,9 +205,11 @@ public final class TransformerSequenceEffectTest {
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
oneFrameFromVideo(MP4_ASSET_URI_STRING, NO_EFFECT),
oneFrameFromVideo(
MP4_PORTRAIT_ASSET_URI_STRING, ImmutableList.of(RgbFilter.createInvertedFilter())));
clippedVideo(MP4_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
clippedVideo(
MP4_PORTRAIT_ASSET_URI_STRING,
ImmutableList.of(RgbFilter.createInvertedFilter()),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
@ -213,7 +217,7 @@ public final class TransformerSequenceEffectTest {
.run(testId, composition);
assertThat(result.filePath).isNotNull();
assertBitmapsMatchExpected(
assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@ -222,61 +226,4 @@ public final class TransformerSequenceEffectTest {
ImmutableList.of(
BitmapOverlay.createStaticBitmapOverlay(readBitmap(OVERLAY_PNG_ASSET_PATH))));
}
private static Composition createComposition(
@Nullable Presentation presentation,
EditedMediaItem editedMediaItem,
EditedMediaItem... editedMediaItems) {
Composition.Builder builder =
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem, editedMediaItems));
if (presentation != null) {
builder.setEffects(
new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.of(presentation)));
}
return builder.build();
}
private static EditedMediaItem oneFrameFromVideo(String uri, List<Effect> effects) {
return new EditedMediaItem.Builder(
MediaItem.fromUri(uri)
.buildUpon()
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
// Clip to only the first frame.
.setEndPositionMs(50)
.build())
.build())
.setRemoveAudio(true)
.setEffects(
new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.copyOf(effects)))
.build();
}
private static EditedMediaItem oneFrameFromImage(String uri, List<Effect> effects) {
return new EditedMediaItem.Builder(MediaItem.fromUri(uri))
// 50ms for a 20-fps video is one frame.
.setFrameRate(20)
.setDurationUs(50_000)
.setEffects(
new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.copyOf(effects)))
.build();
}
private static void assertBitmapsMatchExpected(List<Bitmap> actualBitmaps, String testId)
throws IOException {
for (int i = 0; i < actualBitmaps.size(); i++) {
Bitmap actualBitmap = actualBitmaps.get(i);
String subTestId = testId + "_" + i;
Bitmap expectedBitmap =
readBitmap(Util.formatInvariant("%s/%s.png", PNG_ASSET_BASE_PATH, subTestId));
maybeSaveTestBitmap(
testId, /* bitmapLabel= */ String.valueOf(i), actualBitmap, /* path= */ null);
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, subTestId);
assertWithMessage("For expected bitmap %s.png", subTestId)
.that(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA);
}
}
}

View file

@ -0,0 +1,206 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package androidx.media3.transformer.mh;
import static androidx.media3.common.MimeTypes.VIDEO_H265;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static androidx.media3.transformer.AndroidTestUtil.skipAndLogIfFormatsUnsupported;
import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
import static androidx.media3.transformer.SequenceEffectTestUtil.createComposition;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.skipAndLogIfOpenGlToneMappingUnsupported;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.effect.Presentation;
import androidx.media3.effect.RgbFilter;
import androidx.media3.effect.ScaleAndRotateTransformation;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.EncoderUtil;
import androidx.media3.transformer.ExportException;
import androidx.media3.transformer.ExportTestResult;
import androidx.media3.transformer.Transformer;
import androidx.media3.transformer.TransformerAndroidTestRunner;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
/**
* Tests for using different {@linkplain Effect effects} for {@link MediaItem MediaItems} in one
* {@link EditedMediaItemSequence}, with HDR assets.
*/
@RunWith(AndroidJUnit4.class)
public final class TransformerSequenceEffectTestWithHdr {
// Height of the output Presentation applied to every composition in this test.
private static final int EXPORT_HEIGHT = 240;
@Rule public final TestName testName = new TestName();
private final Context context = ApplicationProvider.getApplicationContext();
// Derived from the running test method's name in setUp(); used as the export's identifier.
private @MonotonicNonNull String testId;
@Before
@EnsuresNonNull({"testId"})
public void setUp() {
testId = testName.getMethodName();
}
/**
* Exports a sequence containing an SDR item followed by an HDR item; skipped when OpenGL
* tone-mapping of the HDR input format is unsupported on this device.
*/
@Test
@RequiresNonNull("testId")
public void export_withSdrThenHdr() throws Exception {
assumeFalse(
skipAndLogIfOpenGlToneMappingUnsupported(
testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT));
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
clippedVideo(
MP4_PORTRAIT_ASSET_URI_STRING,
ImmutableList.of(RgbFilter.createInvertedFilter()),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
clippedVideo(
MP4_ASSET_720P_4_SECOND_HDR10,
ImmutableList.of(
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
.build()
.run(testId, composition);
assertThat(result.filePath).isNotNull();
// Expected bitmaps were generated on the Pixel 7 Pro, because emulators don't
// support decoding HDR.
assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
/**
* If the first asset in a sequence is HDR, then Transformer will output HDR. However, because SDR
* to HDR tone-mapping is not implemented, VideoFrameProcessor cannot take a later SDR input asset
* after already being configured for HDR output.
*/
@Test
@RequiresNonNull("testId")
public void export_withHdrThenSdr_throws_whenHdrEditingSupported() throws Exception {
assumeTrue(
"Device does not support HDR10 editing.",
deviceSupportsHdrEditing(
VIDEO_H265, checkNotNull(MP4_ASSET_720P_4_SECOND_HDR10_FORMAT.colorInfo)));
assumeFalse(
skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
/* outputFormat= */ null));
// HDR item first, SDR item second: per the method javadoc, this ordering is expected to fail.
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
clippedVideo(
MP4_ASSET_720P_4_SECOND_HDR10,
ImmutableList.of(
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
clippedVideo(
MP4_PORTRAIT_ASSET_URI_STRING,
ImmutableList.of(RgbFilter.createInvertedFilter()),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
@Nullable ExportException expectedException = null;
try {
// Run the export, expecting it to throw an ExportException rather than complete.
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
.build()
.run(testId, composition);
} catch (ExportException e) {
expectedException = e;
}
assertThat(expectedException).isNotNull();
assertThat(checkNotNull(checkNotNull(expectedException).getMessage()))
.isEqualTo("Video frame processing error");
}
/**
* If the first asset in a sequence is HDR, but HDR editing is not supported, then the first asset
* will fallback to OpenGL tone-mapping, and configure VideoFrameProcessor for SDR output.
*/
@Test
@RequiresNonNull("testId")
public void export_withHdrThenSdr_whenHdrEditingUnsupported() throws Exception {
assumeFalse(
"Device supports HDR10 editing.",
deviceSupportsHdrEditing(
VIDEO_H265, checkNotNull(MP4_ASSET_720P_4_SECOND_HDR10_FORMAT.colorInfo)));
assumeFalse(
skipAndLogIfOpenGlToneMappingUnsupported(
testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT));
assumeFalse(
skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
/* outputFormat= */ null));
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
clippedVideo(
MP4_ASSET_720P_4_SECOND_HDR10,
ImmutableList.of(
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
clippedVideo(
MP4_PORTRAIT_ASSET_URI_STRING,
ImmutableList.of(RgbFilter.createInvertedFilter()),
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
.build()
.run(testId, composition);
assertThat(result.filePath).isNotNull();
// Expected bitmaps were generated on the Samsung S22 Ultra (US), because emulators don't
// support decoding HDR, and the Pixel 7 Pro does support HDR editing.
assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
/**
* Returns whether this device has at least one encoder that supports HDR editing for the given
* MIME type and {@link ColorInfo}.
*/
private static boolean deviceSupportsHdrEditing(String mimeType, ColorInfo colorInfo) {
return !EncoderUtil.getSupportedEncodersForHdrEditing(mimeType, colorInfo).isEmpty();
}
}

View file

@ -119,19 +119,19 @@ import java.util.concurrent.atomic.AtomicReference;
public void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
@Nullable Format trackFormat,
@Nullable Format decodedFormat,
boolean isLast) {
if (trackFormat == null) {
if (decodedFormat == null) {
checkState(
durationUs != C.TIME_UNSET,
"Could not generate silent audio because duration is unknown.");
} else {
checkState(MimeTypes.isAudio(trackFormat.sampleMimeType));
AudioFormat trackAudioFormat = new AudioFormat(trackFormat);
checkState(isInputAudioFormatValid(trackAudioFormat), /* errorMessage= */ trackAudioFormat);
checkState(MimeTypes.isAudio(decodedFormat.sampleMimeType));
AudioFormat audioFormat = new AudioFormat(decodedFormat);
checkState(isInputAudioFormatValid(audioFormat), /* errorMessage= */ audioFormat);
}
pendingMediaItemChange.set(
new MediaItemChange(editedMediaItem, durationUs, trackFormat, isLast));
new MediaItemChange(editedMediaItem, durationUs, decodedFormat, isLast));
}
@Override

View file

@ -82,7 +82,7 @@ import java.util.concurrent.atomic.AtomicLong;
public void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
@Nullable Format trackFormat,
@Nullable Format decodedFormat,
boolean isLast) {
mediaItemOffsetUs = nextMediaItemOffsetUs.get();
nextMediaItemOffsetUs.addAndGet(durationUs);

View file

@ -156,9 +156,14 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
}
}
/** Overrides the {@code inputFormat}. */
protected Format overrideFormat(Format inputFormat) {
return inputFormat;
/** Overrides the input {@code format}. */
protected Format overrideInputFormat(Format format) {
return format;
}
/** Overrides the output {@code format}. */
protected Format overrideOutputFormat(Format format) {
return format;
}
/** Called when the {@link Format} of the samples fed to the renderer is known. */
@ -212,7 +217,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
if (result != C.RESULT_FORMAT_READ) {
return false;
}
inputFormat = overrideFormat(checkNotNull(formatHolder.format));
inputFormat = overrideInputFormat(checkNotNull(formatHolder.format));
onInputFormatRead(inputFormat);
shouldInitDecoder =
assetLoaderListener.onTrackAdded(
@ -257,11 +262,11 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
if (decoderOutputFormat == null) {
return false;
}
outputFormat = decoderOutputFormat;
outputFormat = overrideOutputFormat(decoderOutputFormat);
} else {
// TODO(b/278259383): Move surface creation out of video sampleConsumer. Init decoder and
// get decoderOutput Format before init sampleConsumer.
outputFormat = inputFormat;
outputFormat = overrideOutputFormat(inputFormat);
}
}

View file

@ -15,6 +15,7 @@
*/
package androidx.media3.transformer;
import static androidx.media3.common.ColorInfo.SDR_BT709_LIMITED;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
@ -56,18 +57,38 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
decodeOnlyPresentationTimestamps = new ArrayList<>();
}
public static ColorInfo getDecoderOutputColor(
ColorInfo decoderInputColor, boolean isMediaCodecToneMappingRequested) {
if (isMediaCodecToneMappingRequested && ColorInfo.isTransferHdr(decoderInputColor)) {
return SDR_BT709_LIMITED;
}
return decoderInputColor;
}
@Override
public String getName() {
return TAG;
}
@Override
protected Format overrideFormat(Format inputFormat) {
protected Format overrideInputFormat(Format format) {
if (hdrMode == Composition.HDR_MODE_EXPERIMENTAL_FORCE_INTERPRET_HDR_AS_SDR
&& ColorInfo.isTransferHdr(inputFormat.colorInfo)) {
return inputFormat.buildUpon().setColorInfo(ColorInfo.SDR_BT709_LIMITED).build();
&& ColorInfo.isTransferHdr(format.colorInfo)) {
return format.buildUpon().setColorInfo(ColorInfo.SDR_BT709_LIMITED).build();
}
return inputFormat;
return format;
}
@Override
protected Format overrideOutputFormat(Format format) {
// Gets the expected output color from the decoder, based on the input track format, if
// tone-mapping is applied.
ColorInfo validColor = VideoSampleExporter.getValidColor(format.colorInfo);
boolean isDecoderToneMappingRequested =
hdrMode == Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC;
ColorInfo outputColor = getDecoderOutputColor(validColor, isDecoderToneMappingRequested);
return format.buildUpon().setColorInfo(outputColor).build();
}
@Override
@ -89,7 +110,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
checkStateNotNull(sampleConsumer);
boolean isDecoderToneMappingRequired =
ColorInfo.isTransferHdr(inputFormat.colorInfo)
&& !ColorInfo.isTransferHdr(sampleConsumer.getExpectedInputColorInfo());
&& hdrMode == Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC;
decoder =
decoderFactory.createForVideoDecoding(
inputFormat,

View file

@ -28,15 +28,15 @@ import androidx.media3.common.MediaItem;
*
* @param editedMediaItem The {@link MediaItem} with the transformations to apply to it.
* @param durationUs The duration of the {@link MediaItem}, in microseconds.
* @param trackFormat The {@link Format} extracted (and possibly decoded) from the {@link
* MediaItem} track, which represents the samples input to the {@link SampleExporter}. {@code
* null} if no such track was extracted.
* @param decodedFormat The {@link Format} decoded from the {@link MediaItem} track, which
* represents the samples output from the {@link SampleExporter}. {@code null} if no such
* track was decoded.
* @param isLast Whether the {@link MediaItem} is the last one passed to the {@link
* SampleExporter}.
*/
void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
@Nullable Format trackFormat,
@Nullable Format decodedFormat,
boolean isLast);
}

View file

@ -285,24 +285,25 @@ import java.util.concurrent.atomic.AtomicInteger;
sampleConsumersByTrackType.entrySet()) {
int outputTrackType = entry.getKey();
if (trackType != outputTrackType) {
onMediaItemChanged(outputTrackType, /* format= */ null);
onMediaItemChanged(outputTrackType, /* outputFormat= */ null);
}
}
}
return sampleConsumer;
}
private void onMediaItemChanged(int trackType, @Nullable Format format) {
private void onMediaItemChanged(int trackType, @Nullable Format outputFormat) {
@Nullable
OnMediaItemChangedListener onMediaItemChangedListener =
mediaItemChangedListenersByTrackType.get(trackType);
if (onMediaItemChangedListener == null) {
return;
}
onMediaItemChangedListener.onMediaItemChanged(
editedMediaItems.get(currentMediaItemIndex),
currentAssetDurationUs,
format,
/* decodedFormat= */ outputFormat,
/* isLast= */ currentMediaItemIndex == editedMediaItems.size() - 1);
}

View file

@ -80,7 +80,12 @@ public final class TextureAssetLoader implements AssetLoader {
checkArgument(format.height != Format.NO_VALUE && format.width != Format.NO_VALUE);
this.editedMediaItem = editedMediaItem;
this.assetLoaderListener = assetLoaderListener;
this.format = format.buildUpon().setSampleMimeType(MimeTypes.VIDEO_RAW).build();
this.format =
format
.buildUpon()
.setColorInfo(VideoSampleExporter.getValidColor(format.colorInfo))
.setSampleMimeType(MimeTypes.VIDEO_RAW)
.build();
this.frameProcessedListener = frameProcessedListener;
progressState = PROGRESS_STATE_NOT_STARTED;
}

View file

@ -584,10 +584,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
GraphInput sampleExporterInput =
sampleExporter.getInput(firstEditedMediaItem, assetLoaderOutputFormat);
OnMediaItemChangedListener onMediaItemChangedListener =
(editedMediaItem, durationUs, trackFormat, isLast) -> {
(editedMediaItem, durationUs, decodedFormat, isLast) -> {
onMediaItemChanged(trackType, durationUs, isLast);
sampleExporterInput.onMediaItemChanged(
editedMediaItem, durationUs, trackFormat, isLast);
editedMediaItem, durationUs, decodedFormat, isLast);
};
sequenceAssetLoaders
.get(sequenceIndex)

View file

@ -64,19 +64,18 @@ import java.util.concurrent.atomic.AtomicLong;
public void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
@Nullable Format trackFormat,
@Nullable Format decodedFormat,
boolean isLast) {
if (trackFormat != null) {
Size decodedSize = getDecodedSize(trackFormat);
ColorInfo colorInfo =
trackFormat.colorInfo == null || !trackFormat.colorInfo.isDataSpaceValid()
? inputColorInfo
: trackFormat.colorInfo;
if (decodedFormat != null) {
Size decodedSize = getDecodedSize(decodedFormat);
videoFrameProcessor.registerInputStream(
getInputType(checkNotNull(trackFormat.sampleMimeType)),
getInputType(checkNotNull(decodedFormat.sampleMimeType)),
createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation),
new FrameInfo.Builder(colorInfo, decodedSize.getWidth(), decodedSize.getHeight())
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
new FrameInfo.Builder(
checkNotNull(decodedFormat.colorInfo),
decodedSize.getWidth(),
decodedSize.getHeight())
.setPixelWidthHeightRatio(decodedFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(initialTimestampOffsetUs + mediaItemOffsetUs.get())
.build());
}

View file

@ -74,6 +74,18 @@ import org.checkerframework.dataflow.qual.Pure;
private boolean hasMuxedTimestampZero;
// TODO: b/307952514 - Move this method to a color utility.
/**
* Adjust for invalid {@link ColorInfo} values, by defaulting to {@link
* ColorInfo#SDR_BT709_LIMITED}.
*/
public static ColorInfo getValidColor(@Nullable ColorInfo colorInfo) {
if (colorInfo == null || !colorInfo.isDataSpaceValid()) {
return ColorInfo.SDR_BT709_LIMITED;
}
return colorInfo;
}
public VideoSampleExporter(
Context context,
Format firstInputFormat,
@ -95,12 +107,7 @@ import org.checkerframework.dataflow.qual.Pure;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
finalFramePresentationTimeUs = C.TIME_UNSET;
ColorInfo decoderInputColor;
if (firstInputFormat.colorInfo == null || !firstInputFormat.colorInfo.isDataSpaceValid()) {
decoderInputColor = ColorInfo.SDR_BT709_LIMITED;
} else {
decoderInputColor = firstInputFormat.colorInfo;
}
ColorInfo decoderInputColor = getValidColor(firstInputFormat.colorInfo);
encoderWrapper =
new EncoderWrapper(
encoderFactory,
@ -112,11 +119,13 @@ import org.checkerframework.dataflow.qual.Pure;
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
@Composition.HdrMode int hdrModeAfterFallback = encoderWrapper.getHdrModeAfterFallback();
boolean isMediaCodecToneMapping =
hdrModeAfterFallback == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC
&& ColorInfo.isTransferHdr(decoderInputColor);
boolean isMediaCodecToneMappingRequested =
hdrModeAfterFallback == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC;
// TODO: b/278259383 - After solving the bug, we can use the decoder output format, and no
// longer need to import this color conversion method.
ColorInfo videoGraphInputColor =
isMediaCodecToneMapping ? SDR_BT709_LIMITED : decoderInputColor;
ExoAssetLoaderVideoRenderer.getDecoderOutputColor(
decoderInputColor, isMediaCodecToneMappingRequested);
boolean isGlToneMapping =
hdrModeAfterFallback == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL