diff --git a/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withHdrThenSdr_whenHdrEditingUnsupported_0.png b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withHdrThenSdr_whenHdrEditingUnsupported_0.png
new file mode 100644
index 0000000000..1a3a97b271
Binary files /dev/null and b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withHdrThenSdr_whenHdrEditingUnsupported_0.png differ
diff --git a/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withHdrThenSdr_whenHdrEditingUnsupported_1.png b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withHdrThenSdr_whenHdrEditingUnsupported_1.png
new file mode 100644
index 0000000000..aaa24ca839
Binary files /dev/null and b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withHdrThenSdr_whenHdrEditingUnsupported_1.png differ
diff --git a/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withHdrThenSdr_whenHdrEditingUnsupported_2.png b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withHdrThenSdr_whenHdrEditingUnsupported_2.png
new file mode 100644
index 0000000000..f0f2bbc6b5
Binary files /dev/null and b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withHdrThenSdr_whenHdrEditingUnsupported_2.png differ
diff --git a/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withSdrThenHdr_0.png b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withSdrThenHdr_0.png
new file mode 100644
index 0000000000..8d41ad9e75
Binary files /dev/null and b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withSdrThenHdr_0.png differ
diff --git a/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withSdrThenHdr_1.png b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withSdrThenHdr_1.png
new file mode 100644
index 0000000000..ce34d0e80e
Binary files /dev/null and b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withSdrThenHdr_1.png differ
diff --git a/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withSdrThenHdr_2.png b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withSdrThenHdr_2.png
new file mode 100644
index 0000000000..48742f7561
Binary files /dev/null and b/libraries/test_data/src/test/assets/media/bitmap/transformer_sequence_effect_test/export_withSdrThenHdr_2.png differ
diff --git a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/SequenceEffectTestUtil.java b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/SequenceEffectTestUtil.java
new file mode 100644
index 0000000000..20cdefbbc8
--- /dev/null
+++ b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/SequenceEffectTestUtil.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package androidx.media3.transformer;
+
+import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA;
+import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
+import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
+import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
+import static com.google.common.truth.Truth.assertWithMessage;
+
+import android.graphics.Bitmap;
+import androidx.annotation.Nullable;
+import androidx.media3.common.Effect;
+import androidx.media3.common.MediaItem;
+import androidx.media3.common.util.Util;
+import androidx.media3.effect.Presentation;
+import com.google.common.collect.ImmutableList;
+import java.io.IOException;
+import java.util.List;
+
+/** Utility class for checking testing {@link EditedMediaItemSequence} instances. */
+public final class SequenceEffectTestUtil {
+ private static final String PNG_ASSET_BASE_PATH = "media/bitmap/transformer_sequence_effect_test";
+ public static final long SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS = 50;
+
+ private SequenceEffectTestUtil() {}
+
+ /**
+ * Creates a {@link Composition} with the specified {@link Presentation} and {@link
+ * EditedMediaItem} instances.
+ */
+ public static Composition createComposition(
+ @Nullable Presentation presentation,
+ EditedMediaItem editedMediaItem,
+ EditedMediaItem... editedMediaItems) {
+ Composition.Builder builder =
+ new Composition.Builder(new EditedMediaItemSequence(editedMediaItem, editedMediaItems));
+ if (presentation != null) {
+ builder.setEffects(
+ new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.of(presentation)));
+ }
+ return builder.build();
+ }
+
+ /**
+ * Creates an {@link EditedMediaItem} with a video at {@code uri} clipped to the {@code
+ * endPositionMs}, with {@code effects} applied.
+ *
+ *
This may be used to, for example, clip to only the first frame of a video.
+ */
+ public static EditedMediaItem clippedVideo(String uri, List effects, long endPositionMs) {
+ return new EditedMediaItem.Builder(
+ MediaItem.fromUri(uri)
+ .buildUpon()
+ .setClippingConfiguration(
+ new MediaItem.ClippingConfiguration.Builder()
+ .setEndPositionMs(endPositionMs)
+ .build())
+ .build())
+ .setRemoveAudio(true)
+ .setEffects(
+ new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.copyOf(effects)))
+ .build();
+ }
+
+ /**
+ * Creates an {@link EditedMediaItem} with an image at {@code uri}, shown once, with {@code
+ * effects} applied.
+ */
+ public static EditedMediaItem oneFrameFromImage(String uri, List effects) {
+ return new EditedMediaItem.Builder(MediaItem.fromUri(uri))
+ // 50ms for a 20-fps video is one frame.
+ .setFrameRate(20)
+ .setDurationUs(50_000)
+ .setEffects(
+ new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.copyOf(effects)))
+ .build();
+ }
+
+ /**
+ * Assert that the bitmaps output in {@link #PNG_ASSET_BASE_PATH} match those written in {code
+ * actualBitmaps}.
+ *
+ * Also saves {@code actualBitmaps} bitmaps, in case they differ from expected bitmaps, stored
+ * at {@link #PNG_ASSET_BASE_PATH}/{@code testId}_id.png.
+ */
+ public static void assertBitmapsMatchExpectedAndSave(List actualBitmaps, String testId)
+ throws IOException {
+ for (int i = 0; i < actualBitmaps.size(); i++) {
+ Bitmap actualBitmap = actualBitmaps.get(i);
+ maybeSaveTestBitmap(
+ testId, /* bitmapLabel= */ String.valueOf(i), actualBitmap, /* path= */ null);
+ String subTestId = testId + "_" + i;
+ String expectedPath = Util.formatInvariant("%s/%s.png", PNG_ASSET_BASE_PATH, subTestId);
+ Bitmap expectedBitmap = readBitmap(expectedPath);
+
+ float averagePixelAbsoluteDifference =
+ getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, subTestId);
+ assertWithMessage("For expected bitmap " + expectedPath)
+ .that(averagePixelAbsoluteDifference)
+ .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA);
+ }
+ }
+}
diff --git a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/TransformerSequenceEffectTest.java b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/TransformerSequenceEffectTest.java
index edac2a7e5a..04af39f6e1 100644
--- a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/TransformerSequenceEffectTest.java
+++ b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/TransformerSequenceEffectTest.java
@@ -19,9 +19,6 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Util.SDK_INT;
-import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA;
-import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
-import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.JPG_PORTRAIT_ASSET_URI_STRING;
@@ -29,13 +26,15 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
+import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
+import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
+import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
+import static androidx.media3.transformer.SequenceEffectTestUtil.createComposition;
+import static androidx.media3.transformer.SequenceEffectTestUtil.oneFrameFromImage;
import static com.google.common.truth.Truth.assertThat;
-import static com.google.common.truth.Truth.assertWithMessage;
import static org.junit.Assume.assumeFalse;
import android.content.Context;
-import android.graphics.Bitmap;
-import androidx.annotation.Nullable;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Util;
@@ -49,19 +48,17 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.base.Ascii;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
-import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Tests for using different {@linkplain Effect effects} for {@link MediaItem MediaItems} in one
- * {@link EditedMediaItemSequence} .
+ * {@link EditedMediaItemSequence}.
*/
@RunWith(AndroidJUnit4.class)
public final class TransformerSequenceEffectTest {
private static final ImmutableList NO_EFFECT = ImmutableList.of();
- private static final String PNG_ASSET_BASE_PATH = "media/bitmap/transformer_sequence_effect_test";
private static final String OVERLAY_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png";
private static final int EXPORT_WIDTH = 360;
private static final int EXPORT_HEIGHT = 240;
@@ -82,11 +79,12 @@ public final class TransformerSequenceEffectTest {
Composition composition =
createComposition(
/* presentation= */ null,
- oneFrameFromVideo(
+ clippedVideo(
MP4_ASSET_URI_STRING,
ImmutableList.of(
Presentation.createForWidthAndHeight(
- EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT))),
+ EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT)),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
oneFrameFromImage(
JPG_ASSET_URI_STRING,
ImmutableList.of(
@@ -108,7 +106,7 @@ public final class TransformerSequenceEffectTest {
.run(testId, composition);
assertThat(result.filePath).isNotNull();
- assertBitmapsMatchExpected(
+ assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@@ -142,14 +140,17 @@ public final class TransformerSequenceEffectTest {
Presentation.createForWidthAndHeight(
EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT))),
oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT),
- oneFrameFromVideo(
- MP4_ASSET_URI_STRING, ImmutableList.of(RgbFilter.createInvertedFilter())),
- oneFrameFromVideo(
+ clippedVideo(
+ MP4_ASSET_URI_STRING,
+ ImmutableList.of(RgbFilter.createInvertedFilter()),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
+ clippedVideo(
MP4_ASSET_URI_STRING,
ImmutableList.of(
Presentation.createForWidthAndHeight(
EXPORT_WIDTH / 2, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT),
- createOverlayEffect())));
+ createOverlayEffect()),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
@@ -157,7 +158,7 @@ public final class TransformerSequenceEffectTest {
.run(testId, composition);
assertThat(result.filePath).isNotNull();
- assertBitmapsMatchExpected(
+ assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@@ -175,8 +176,9 @@ public final class TransformerSequenceEffectTest {
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT),
- oneFrameFromVideo(MP4_PORTRAIT_ASSET_URI_STRING, NO_EFFECT),
- oneFrameFromVideo(MP4_ASSET_URI_STRING, NO_EFFECT),
+ clippedVideo(
+ MP4_PORTRAIT_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
+ clippedVideo(MP4_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
oneFrameFromImage(JPG_PORTRAIT_ASSET_URI_STRING, NO_EFFECT));
ExportTestResult result =
@@ -185,7 +187,7 @@ public final class TransformerSequenceEffectTest {
.run(testId, composition);
assertThat(result.filePath).isNotNull();
- assertBitmapsMatchExpected(
+ assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@@ -203,9 +205,11 @@ public final class TransformerSequenceEffectTest {
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
- oneFrameFromVideo(MP4_ASSET_URI_STRING, NO_EFFECT),
- oneFrameFromVideo(
- MP4_PORTRAIT_ASSET_URI_STRING, ImmutableList.of(RgbFilter.createInvertedFilter())));
+ clippedVideo(MP4_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
+ clippedVideo(
+ MP4_PORTRAIT_ASSET_URI_STRING,
+ ImmutableList.of(RgbFilter.createInvertedFilter()),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
@@ -213,7 +217,7 @@ public final class TransformerSequenceEffectTest {
.run(testId, composition);
assertThat(result.filePath).isNotNull();
- assertBitmapsMatchExpected(
+ assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@@ -222,61 +226,4 @@ public final class TransformerSequenceEffectTest {
ImmutableList.of(
BitmapOverlay.createStaticBitmapOverlay(readBitmap(OVERLAY_PNG_ASSET_PATH))));
}
-
- private static Composition createComposition(
- @Nullable Presentation presentation,
- EditedMediaItem editedMediaItem,
- EditedMediaItem... editedMediaItems) {
- Composition.Builder builder =
- new Composition.Builder(new EditedMediaItemSequence(editedMediaItem, editedMediaItems));
- if (presentation != null) {
- builder.setEffects(
- new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.of(presentation)));
- }
- return builder.build();
- }
-
- private static EditedMediaItem oneFrameFromVideo(String uri, List effects) {
- return new EditedMediaItem.Builder(
- MediaItem.fromUri(uri)
- .buildUpon()
- .setClippingConfiguration(
- new MediaItem.ClippingConfiguration.Builder()
- // Clip to only the first frame.
- .setEndPositionMs(50)
- .build())
- .build())
- .setRemoveAudio(true)
- .setEffects(
- new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.copyOf(effects)))
- .build();
- }
-
- private static EditedMediaItem oneFrameFromImage(String uri, List effects) {
- return new EditedMediaItem.Builder(MediaItem.fromUri(uri))
- // 50ms for a 20-fps video is one frame.
- .setFrameRate(20)
- .setDurationUs(50_000)
- .setEffects(
- new Effects(/* audioProcessors= */ ImmutableList.of(), ImmutableList.copyOf(effects)))
- .build();
- }
-
- private static void assertBitmapsMatchExpected(List actualBitmaps, String testId)
- throws IOException {
- for (int i = 0; i < actualBitmaps.size(); i++) {
- Bitmap actualBitmap = actualBitmaps.get(i);
- String subTestId = testId + "_" + i;
- Bitmap expectedBitmap =
- readBitmap(Util.formatInvariant("%s/%s.png", PNG_ASSET_BASE_PATH, subTestId));
-
- maybeSaveTestBitmap(
- testId, /* bitmapLabel= */ String.valueOf(i), actualBitmap, /* path= */ null);
- float averagePixelAbsoluteDifference =
- getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, subTestId);
- assertWithMessage("For expected bitmap %s.png", subTestId)
- .that(averagePixelAbsoluteDifference)
- .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA);
- }
- }
}
diff --git a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/mh/TransformerSequenceEffectTestWithHdr.java b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/mh/TransformerSequenceEffectTestWithHdr.java
new file mode 100644
index 0000000000..c1f91dbd78
--- /dev/null
+++ b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/mh/TransformerSequenceEffectTestWithHdr.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package androidx.media3.transformer.mh;
+
+import static androidx.media3.common.MimeTypes.VIDEO_H265;
+import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
+import static androidx.media3.transformer.AndroidTestUtil.skipAndLogIfFormatsUnsupported;
+import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
+import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
+import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
+import static androidx.media3.transformer.SequenceEffectTestUtil.createComposition;
+import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.skipAndLogIfOpenGlToneMappingUnsupported;
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assume.assumeFalse;
+import static org.junit.Assume.assumeTrue;
+
+import android.content.Context;
+import androidx.annotation.Nullable;
+import androidx.media3.common.ColorInfo;
+import androidx.media3.common.Effect;
+import androidx.media3.common.MediaItem;
+import androidx.media3.effect.Presentation;
+import androidx.media3.effect.RgbFilter;
+import androidx.media3.effect.ScaleAndRotateTransformation;
+import androidx.media3.transformer.Composition;
+import androidx.media3.transformer.EditedMediaItemSequence;
+import androidx.media3.transformer.EncoderUtil;
+import androidx.media3.transformer.ExportException;
+import androidx.media3.transformer.ExportTestResult;
+import androidx.media3.transformer.Transformer;
+import androidx.media3.transformer.TransformerAndroidTestRunner;
+import androidx.test.core.app.ApplicationProvider;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import com.google.common.collect.ImmutableList;
+import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
+import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
+import org.checkerframework.checker.nullness.qual.RequiresNonNull;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+import org.junit.runner.RunWith;
+
+/**
+ * Tests for using different {@linkplain Effect effects} for {@link MediaItem MediaItems} in one
+ * {@link EditedMediaItemSequence}, with HDR assets.
+ */
+@RunWith(AndroidJUnit4.class)
+public final class TransformerSequenceEffectTestWithHdr {
+
+ private static final int EXPORT_HEIGHT = 240;
+ @Rule public final TestName testName = new TestName();
+
+ private final Context context = ApplicationProvider.getApplicationContext();
+
+ private @MonotonicNonNull String testId;
+
+ @Before
+ @EnsuresNonNull({"testId"})
+ public void setUp() {
+ testId = testName.getMethodName();
+ }
+
+ @Test
+ @RequiresNonNull("testId")
+ public void export_withSdrThenHdr() throws Exception {
+ assumeFalse(
+ skipAndLogIfOpenGlToneMappingUnsupported(
+ testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT));
+ Composition composition =
+ createComposition(
+ Presentation.createForHeight(EXPORT_HEIGHT),
+ clippedVideo(
+ MP4_PORTRAIT_ASSET_URI_STRING,
+ ImmutableList.of(RgbFilter.createInvertedFilter()),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
+ clippedVideo(
+ MP4_ASSET_720P_4_SECOND_HDR10,
+ ImmutableList.of(
+ new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
+
+ ExportTestResult result =
+ new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
+ .build()
+ .run(testId, composition);
+
+ assertThat(result.filePath).isNotNull();
+ // Expected bitmaps were generated on the Pixel 7 Pro, because emulators don't
+ // support decoding HDR.
+ assertBitmapsMatchExpectedAndSave(
+ extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
+ }
+
+ /**
+ * If the first asset in a sequence is HDR, then Transformer will output HDR. However, because SDR
+ * to HDR tone-mapping is not implemented, VideoFrameProcessor cannot take a later SDR input asset
+ * after already being configured for HDR output.
+ */
+ @Test
+ @RequiresNonNull("testId")
+ public void export_withHdrThenSdr_throws_whenHdrEditingSupported() throws Exception {
+ assumeTrue(
+ "Device does not support HDR10 editing.",
+ deviceSupportsHdrEditing(
+ VIDEO_H265, checkNotNull(MP4_ASSET_720P_4_SECOND_HDR10_FORMAT.colorInfo)));
+ assumeFalse(
+ skipAndLogIfFormatsUnsupported(
+ context,
+ testId,
+ /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
+ /* outputFormat= */ null));
+ Composition composition =
+ createComposition(
+ Presentation.createForHeight(EXPORT_HEIGHT),
+ clippedVideo(
+ MP4_ASSET_720P_4_SECOND_HDR10,
+ ImmutableList.of(
+ new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
+ clippedVideo(
+ MP4_PORTRAIT_ASSET_URI_STRING,
+ ImmutableList.of(RgbFilter.createInvertedFilter()),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
+
+ @Nullable ExportException expectedException = null;
+ try {
+ new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
+ .build()
+ .run(testId, composition);
+ } catch (ExportException e) {
+ expectedException = e;
+ }
+ assertThat(expectedException).isNotNull();
+ assertThat(checkNotNull(checkNotNull(expectedException).getMessage()))
+ .isEqualTo("Video frame processing error");
+ }
+
+ /**
+ * If the first asset in a sequence is HDR, but HDR editing is not supported, then the first asset
+ * will fallback to OpenGL tone-mapping, and configure VideoFrameProcessor for SDR output.
+ */
+ @Test
+ @RequiresNonNull("testId")
+ public void export_withHdrThenSdr_whenHdrEditingUnsupported() throws Exception {
+ assumeFalse(
+ "Device supports HDR10 editing.",
+ deviceSupportsHdrEditing(
+ VIDEO_H265, checkNotNull(MP4_ASSET_720P_4_SECOND_HDR10_FORMAT.colorInfo)));
+ assumeFalse(
+ skipAndLogIfOpenGlToneMappingUnsupported(
+ testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT));
+ assumeFalse(
+ skipAndLogIfFormatsUnsupported(
+ context,
+ testId,
+ /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
+ /* outputFormat= */ null));
+ Composition composition =
+ createComposition(
+ Presentation.createForHeight(EXPORT_HEIGHT),
+ clippedVideo(
+ MP4_ASSET_720P_4_SECOND_HDR10,
+ ImmutableList.of(
+ new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
+ clippedVideo(
+ MP4_PORTRAIT_ASSET_URI_STRING,
+ ImmutableList.of(RgbFilter.createInvertedFilter()),
+ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
+
+ ExportTestResult result =
+ new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
+ .build()
+ .run(testId, composition);
+
+ assertThat(result.filePath).isNotNull();
+ // Expected bitmaps were generated on the Samsung S22 Ultra (US), because emulators don't
+ // support decoding HDR, and the Pixel 7 Pro does support HDR editing.
+ assertBitmapsMatchExpectedAndSave(
+ extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
+ }
+
+ private static boolean deviceSupportsHdrEditing(String mimeType, ColorInfo colorInfo) {
+ return !EncoderUtil.getSupportedEncodersForHdrEditing(mimeType, colorInfo).isEmpty();
+ }
+}
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/AudioGraphInput.java b/libraries/transformer/src/main/java/androidx/media3/transformer/AudioGraphInput.java
index 7be7d0b9bc..7fd9f3f1ff 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/AudioGraphInput.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/AudioGraphInput.java
@@ -119,19 +119,19 @@ import java.util.concurrent.atomic.AtomicReference;
public void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
- @Nullable Format trackFormat,
+ @Nullable Format decodedFormat,
boolean isLast) {
- if (trackFormat == null) {
+ if (decodedFormat == null) {
checkState(
durationUs != C.TIME_UNSET,
"Could not generate silent audio because duration is unknown.");
} else {
- checkState(MimeTypes.isAudio(trackFormat.sampleMimeType));
- AudioFormat trackAudioFormat = new AudioFormat(trackFormat);
- checkState(isInputAudioFormatValid(trackAudioFormat), /* errorMessage= */ trackAudioFormat);
+ checkState(MimeTypes.isAudio(decodedFormat.sampleMimeType));
+ AudioFormat audioFormat = new AudioFormat(decodedFormat);
+ checkState(isInputAudioFormatValid(audioFormat), /* errorMessage= */ audioFormat);
}
pendingMediaItemChange.set(
- new MediaItemChange(editedMediaItem, durationUs, trackFormat, isLast));
+ new MediaItemChange(editedMediaItem, durationUs, decodedFormat, isLast));
}
@Override
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/EncodedSampleExporter.java b/libraries/transformer/src/main/java/androidx/media3/transformer/EncodedSampleExporter.java
index 1b64ab6af1..03097e1119 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/EncodedSampleExporter.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/EncodedSampleExporter.java
@@ -82,7 +82,7 @@ import java.util.concurrent.atomic.AtomicLong;
public void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
- @Nullable Format trackFormat,
+ @Nullable Format decodedFormat,
boolean isLast) {
mediaItemOffsetUs = nextMediaItemOffsetUs.get();
nextMediaItemOffsetUs.addAndGet(durationUs);
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/ExoAssetLoaderBaseRenderer.java b/libraries/transformer/src/main/java/androidx/media3/transformer/ExoAssetLoaderBaseRenderer.java
index 1a155ea4ac..d67842e23f 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/ExoAssetLoaderBaseRenderer.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/ExoAssetLoaderBaseRenderer.java
@@ -156,9 +156,14 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
}
}
- /** Overrides the {@code inputFormat}. */
- protected Format overrideFormat(Format inputFormat) {
- return inputFormat;
+ /** Overrides the input {@code format}. */
+ protected Format overrideInputFormat(Format format) {
+ return format;
+ }
+
+ /** Overrides the output {@code format}. */
+ protected Format overrideOutputFormat(Format format) {
+ return format;
}
/** Called when the {@link Format} of the samples fed to the renderer is known. */
@@ -212,7 +217,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
if (result != C.RESULT_FORMAT_READ) {
return false;
}
- inputFormat = overrideFormat(checkNotNull(formatHolder.format));
+ inputFormat = overrideInputFormat(checkNotNull(formatHolder.format));
onInputFormatRead(inputFormat);
shouldInitDecoder =
assetLoaderListener.onTrackAdded(
@@ -257,11 +262,11 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
if (decoderOutputFormat == null) {
return false;
}
- outputFormat = decoderOutputFormat;
+ outputFormat = overrideOutputFormat(decoderOutputFormat);
} else {
// TODO(b/278259383): Move surface creation out of video sampleConsumer. Init decoder and
// get decoderOutput Format before init sampleConsumer.
- outputFormat = inputFormat;
+ outputFormat = overrideOutputFormat(inputFormat);
}
}
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/ExoAssetLoaderVideoRenderer.java b/libraries/transformer/src/main/java/androidx/media3/transformer/ExoAssetLoaderVideoRenderer.java
index b522bd4f2d..a64921cfe9 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/ExoAssetLoaderVideoRenderer.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/ExoAssetLoaderVideoRenderer.java
@@ -15,6 +15,7 @@
*/
package androidx.media3.transformer;
+import static androidx.media3.common.ColorInfo.SDR_BT709_LIMITED;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
@@ -56,18 +57,38 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
decodeOnlyPresentationTimestamps = new ArrayList<>();
}
+ public static ColorInfo getDecoderOutputColor(
+ ColorInfo decoderInputColor, boolean isMediaCodecToneMappingRequested) {
+ if (isMediaCodecToneMappingRequested && ColorInfo.isTransferHdr(decoderInputColor)) {
+ return SDR_BT709_LIMITED;
+ }
+ return decoderInputColor;
+ }
+
@Override
public String getName() {
return TAG;
}
@Override
- protected Format overrideFormat(Format inputFormat) {
+ protected Format overrideInputFormat(Format format) {
if (hdrMode == Composition.HDR_MODE_EXPERIMENTAL_FORCE_INTERPRET_HDR_AS_SDR
- && ColorInfo.isTransferHdr(inputFormat.colorInfo)) {
- return inputFormat.buildUpon().setColorInfo(ColorInfo.SDR_BT709_LIMITED).build();
+ && ColorInfo.isTransferHdr(format.colorInfo)) {
+ return format.buildUpon().setColorInfo(ColorInfo.SDR_BT709_LIMITED).build();
}
- return inputFormat;
+ return format;
+ }
+
+ @Override
+ protected Format overrideOutputFormat(Format format) {
+ // Gets the expected output color from the decoder, based on the input track format, if
+ // tone-mapping is applied.
+ ColorInfo validColor = VideoSampleExporter.getValidColor(format.colorInfo);
+ boolean isDecoderToneMappingRequested =
+ hdrMode == Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC;
+ ColorInfo outputColor = getDecoderOutputColor(validColor, isDecoderToneMappingRequested);
+
+ return format.buildUpon().setColorInfo(outputColor).build();
}
@Override
@@ -89,7 +110,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
checkStateNotNull(sampleConsumer);
boolean isDecoderToneMappingRequired =
ColorInfo.isTransferHdr(inputFormat.colorInfo)
- && !ColorInfo.isTransferHdr(sampleConsumer.getExpectedInputColorInfo());
+ && hdrMode == Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC;
decoder =
decoderFactory.createForVideoDecoding(
inputFormat,
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/OnMediaItemChangedListener.java b/libraries/transformer/src/main/java/androidx/media3/transformer/OnMediaItemChangedListener.java
index 03e498c263..4f4fef1e99 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/OnMediaItemChangedListener.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/OnMediaItemChangedListener.java
@@ -28,15 +28,15 @@ import androidx.media3.common.MediaItem;
*
* @param editedMediaItem The {@link MediaItem} with the transformations to apply to it.
* @param durationUs The duration of the {@link MediaItem}, in microseconds.
- * @param trackFormat The {@link Format} extracted (and possibly decoded) from the {@link
- * MediaItem} track, which represents the samples input to the {@link SampleExporter}. {@code
- * null} if no such track was extracted.
+ * @param decodedFormat The {@link Format} decoded from the {@link MediaItem} track, which
+ * represents the samples input to the {@link SampleExporter}. {@code null} if no such
+ * track was decoded.
* @param isLast Whether the {@link MediaItem} is the last one passed to the {@link
* SampleExporter}.
*/
void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
- @Nullable Format trackFormat,
+ @Nullable Format decodedFormat,
boolean isLast);
}
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/SequenceAssetLoader.java b/libraries/transformer/src/main/java/androidx/media3/transformer/SequenceAssetLoader.java
index 960aca68cb..f7931a655f 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/SequenceAssetLoader.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/SequenceAssetLoader.java
@@ -285,24 +285,25 @@ import java.util.concurrent.atomic.AtomicInteger;
sampleConsumersByTrackType.entrySet()) {
int outputTrackType = entry.getKey();
if (trackType != outputTrackType) {
- onMediaItemChanged(outputTrackType, /* format= */ null);
+ onMediaItemChanged(outputTrackType, /* outputFormat= */ null);
}
}
}
return sampleConsumer;
}
- private void onMediaItemChanged(int trackType, @Nullable Format format) {
+ private void onMediaItemChanged(int trackType, @Nullable Format outputFormat) {
@Nullable
OnMediaItemChangedListener onMediaItemChangedListener =
mediaItemChangedListenersByTrackType.get(trackType);
if (onMediaItemChangedListener == null) {
return;
}
+
onMediaItemChangedListener.onMediaItemChanged(
editedMediaItems.get(currentMediaItemIndex),
currentAssetDurationUs,
- format,
+ /* decodedFormat= */ outputFormat,
/* isLast= */ currentMediaItemIndex == editedMediaItems.size() - 1);
}
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/TextureAssetLoader.java b/libraries/transformer/src/main/java/androidx/media3/transformer/TextureAssetLoader.java
index a04ccdcf48..bedcb4293d 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/TextureAssetLoader.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/TextureAssetLoader.java
@@ -80,7 +80,12 @@ public final class TextureAssetLoader implements AssetLoader {
checkArgument(format.height != Format.NO_VALUE && format.width != Format.NO_VALUE);
this.editedMediaItem = editedMediaItem;
this.assetLoaderListener = assetLoaderListener;
- this.format = format.buildUpon().setSampleMimeType(MimeTypes.VIDEO_RAW).build();
+ this.format =
+ format
+ .buildUpon()
+ .setColorInfo(VideoSampleExporter.getValidColor(format.colorInfo))
+ .setSampleMimeType(MimeTypes.VIDEO_RAW)
+ .build();
this.frameProcessedListener = frameProcessedListener;
progressState = PROGRESS_STATE_NOT_STARTED;
}
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java b/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java
index 4c860f79fe..2a5bfea02f 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java
@@ -584,10 +584,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
GraphInput sampleExporterInput =
sampleExporter.getInput(firstEditedMediaItem, assetLoaderOutputFormat);
OnMediaItemChangedListener onMediaItemChangedListener =
- (editedMediaItem, durationUs, trackFormat, isLast) -> {
+ (editedMediaItem, durationUs, decodedFormat, isLast) -> {
onMediaItemChanged(trackType, durationUs, isLast);
sampleExporterInput.onMediaItemChanged(
- editedMediaItem, durationUs, trackFormat, isLast);
+ editedMediaItem, durationUs, decodedFormat, isLast);
};
sequenceAssetLoaders
.get(sequenceIndex)
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java
index 69beafcece..53fef7ddda 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java
@@ -64,19 +64,18 @@ import java.util.concurrent.atomic.AtomicLong;
public void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
- @Nullable Format trackFormat,
+ @Nullable Format decodedFormat,
boolean isLast) {
- if (trackFormat != null) {
- Size decodedSize = getDecodedSize(trackFormat);
- ColorInfo colorInfo =
- trackFormat.colorInfo == null || !trackFormat.colorInfo.isDataSpaceValid()
- ? inputColorInfo
- : trackFormat.colorInfo;
+ if (decodedFormat != null) {
+ Size decodedSize = getDecodedSize(decodedFormat);
videoFrameProcessor.registerInputStream(
- getInputType(checkNotNull(trackFormat.sampleMimeType)),
+ getInputType(checkNotNull(decodedFormat.sampleMimeType)),
createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation),
- new FrameInfo.Builder(colorInfo, decodedSize.getWidth(), decodedSize.getHeight())
- .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
+ new FrameInfo.Builder(
+ checkNotNull(decodedFormat.colorInfo),
+ decodedSize.getWidth(),
+ decodedSize.getHeight())
+ .setPixelWidthHeightRatio(decodedFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(initialTimestampOffsetUs + mediaItemOffsetUs.get())
.build());
}
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java
index 146247d2b1..8f22a42c42 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java
@@ -74,6 +74,18 @@ import org.checkerframework.dataflow.qual.Pure;
private boolean hasMuxedTimestampZero;
+ // TODO: b/307952514 - Move this method to a color utility.
+ /**
+ * Adjusts for invalid {@link ColorInfo} values by defaulting to {@link
+ * ColorInfo#SDR_BT709_LIMITED}.
+ */
+ public static ColorInfo getValidColor(@Nullable ColorInfo colorInfo) {
+ if (colorInfo == null || !colorInfo.isDataSpaceValid()) {
+ return ColorInfo.SDR_BT709_LIMITED;
+ }
+ return colorInfo;
+ }
+
public VideoSampleExporter(
Context context,
Format firstInputFormat,
@@ -95,12 +107,7 @@ import org.checkerframework.dataflow.qual.Pure;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
finalFramePresentationTimeUs = C.TIME_UNSET;
- ColorInfo decoderInputColor;
- if (firstInputFormat.colorInfo == null || !firstInputFormat.colorInfo.isDataSpaceValid()) {
- decoderInputColor = ColorInfo.SDR_BT709_LIMITED;
- } else {
- decoderInputColor = firstInputFormat.colorInfo;
- }
+ ColorInfo decoderInputColor = getValidColor(firstInputFormat.colorInfo);
encoderWrapper =
new EncoderWrapper(
encoderFactory,
@@ -112,11 +119,13 @@ import org.checkerframework.dataflow.qual.Pure;
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
@Composition.HdrMode int hdrModeAfterFallback = encoderWrapper.getHdrModeAfterFallback();
- boolean isMediaCodecToneMapping =
- hdrModeAfterFallback == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC
- && ColorInfo.isTransferHdr(decoderInputColor);
+ boolean isMediaCodecToneMappingRequested =
+ hdrModeAfterFallback == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC;
+ // TODO: b/278259383 - After solving the bug, we can use the decoder output format, and no
+ // longer need to import this color conversion method.
ColorInfo videoGraphInputColor =
- isMediaCodecToneMapping ? SDR_BT709_LIMITED : decoderInputColor;
+ ExoAssetLoaderVideoRenderer.getDecoderOutputColor(
+ decoderInputColor, isMediaCodecToneMappingRequested);
boolean isGlToneMapping =
hdrModeAfterFallback == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL