diff --git a/RELEASENOTES.md b/RELEASENOTES.md
index 6b709a0f5a..0823db0e4b 100644
--- a/RELEASENOTES.md
+++ b/RELEASENOTES.md
@@ -12,6 +12,8 @@
* Add `selectedAudioLanguage` parameter to
`DefaultTrackSelector.selectVideoTrack()` method.
* Transformer:
+ * Update parameters of `VideoFrameProcessor.registerInputStream` and
+ `VideoFrameProcessor.Listener.onInputStreamRegistered` to use `Format`.
* Extractors:
* Fix media duration parsing in `mdhd` box of MP4 files to handle `-1`
values ([#1819](https://github.com/androidx/media/issues/1819)).
diff --git a/libraries/common/src/main/java/androidx/media3/common/FrameInfo.java b/libraries/common/src/main/java/androidx/media3/common/FrameInfo.java
index 35598483a9..9c6b4a4c84 100644
--- a/libraries/common/src/main/java/androidx/media3/common/FrameInfo.java
+++ b/libraries/common/src/main/java/androidx/media3/common/FrameInfo.java
@@ -18,123 +18,34 @@ package androidx.media3.common;
import static androidx.media3.common.util.Assertions.checkArgument;
import androidx.media3.common.util.UnstableApi;
-import com.google.errorprone.annotations.CanIgnoreReturnValue;
/** Value class specifying information about a decoded video frame. */
@UnstableApi
public class FrameInfo {
- /** A builder for {@link FrameInfo} instances. */
- public static final class Builder {
-
- private ColorInfo colorInfo;
- private int width;
- private int height;
- private float pixelWidthHeightRatio;
- private long offsetToAddUs;
-
- /**
- * Creates an instance with default values.
- *
- * @param colorInfo The {@link ColorInfo}.
- * @param width The frame width, in pixels.
- * @param height The frame height, in pixels.
- */
- public Builder(ColorInfo colorInfo, int width, int height) {
- this.colorInfo = colorInfo;
- this.width = width;
- this.height = height;
- pixelWidthHeightRatio = 1;
- }
-
- /** Creates an instance with the values of the provided {@link FrameInfo}. */
- public Builder(FrameInfo frameInfo) {
- colorInfo = frameInfo.colorInfo;
- width = frameInfo.width;
- height = frameInfo.height;
- pixelWidthHeightRatio = frameInfo.pixelWidthHeightRatio;
- offsetToAddUs = frameInfo.offsetToAddUs;
- }
-
- /** Sets the {@link ColorInfo}. */
- @CanIgnoreReturnValue
- public Builder setColorInfo(ColorInfo colorInfo) {
- this.colorInfo = colorInfo;
- return this;
- }
-
- /** Sets the frame width, in pixels. */
- @CanIgnoreReturnValue
- public Builder setWidth(int width) {
- this.width = width;
- return this;
- }
-
- /** Sets the frame height, in pixels. */
- @CanIgnoreReturnValue
- public Builder setHeight(int height) {
- this.height = height;
- return this;
- }
-
- /**
- * Sets the ratio of width over height for each pixel.
- *
-     * <p>The default value is {@code 1}.
- */
- @CanIgnoreReturnValue
- public Builder setPixelWidthHeightRatio(float pixelWidthHeightRatio) {
- this.pixelWidthHeightRatio = pixelWidthHeightRatio;
- return this;
- }
-
- /**
- * Sets the {@linkplain FrameInfo#offsetToAddUs offset to add} to the frame presentation
- * timestamp, in microseconds.
- *
-     * <p>The default value is {@code 0}.
- */
- @CanIgnoreReturnValue
- public Builder setOffsetToAddUs(long offsetToAddUs) {
- this.offsetToAddUs = offsetToAddUs;
- return this;
- }
-
- /** Builds a {@link FrameInfo} instance. */
- public FrameInfo build() {
- return new FrameInfo(colorInfo, width, height, pixelWidthHeightRatio, offsetToAddUs);
- }
- }
-
- /** The {@link ColorInfo} of the frame. */
- public final ColorInfo colorInfo;
-
- /** The width of the frame, in pixels. */
- public final int width;
-
- /** The height of the frame, in pixels. */
- public final int height;
-
- /** The ratio of width over height for each pixel. */
- public final float pixelWidthHeightRatio;
-
/**
- * The offset that must be added to the frame presentation timestamp, in microseconds.
+ * The {@link Format} of the frame.
*
-   * <p>This offset is not part of the input timestamps. It is added to the frame timestamps before
- * processing, and is retained in the output timestamps.
+   * <p>The {@link Format#colorInfo} must be set, and the {@link Format#width} and {@link
+ * Format#height} must be greater than 0.
*/
+ public final Format format;
+
+ /** The offset that must be added to the frame presentation timestamp, in microseconds. */
public final long offsetToAddUs;
- private FrameInfo(
- ColorInfo colorInfo, int width, int height, float pixelWidthHeightRatio, long offsetToAddUs) {
- checkArgument(width > 0, "width must be positive, but is: " + width);
- checkArgument(height > 0, "height must be positive, but is: " + height);
+ /**
+ * Creates an instance.
+ *
+ * @param format See {@link #format}.
+ * @param offsetToAddUs See {@link #offsetToAddUs}.
+ */
+ public FrameInfo(Format format, long offsetToAddUs) {
+ checkArgument(format.colorInfo != null, "format colorInfo must be set");
+ checkArgument(format.width > 0, "format width must be positive, but is: " + format.width);
+ checkArgument(format.height > 0, "format height must be positive, but is: " + format.height);
- this.colorInfo = colorInfo;
- this.width = width;
- this.height = height;
- this.pixelWidthHeightRatio = pixelWidthHeightRatio;
+ this.format = format;
this.offsetToAddUs = offsetToAddUs;
}
}
diff --git a/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java b/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java
index c5bcc6ab02..7bff863d7b 100644
--- a/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java
+++ b/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java
@@ -84,8 +84,8 @@ public interface VideoFrameProcessor {
* Input frames come from the {@linkplain #getInputSurface input surface} and don't need to be
* {@linkplain #registerInputFrame registered} (unlike with {@link #INPUT_TYPE_SURFACE}).
*
-   * <p>Every frame must use the {@linkplain #registerInputStream(int, List, FrameInfo) input
- * stream's registered} frame info. Also sets the surface's {@linkplain
+   * <p>Every frame must use the {@linkplain #registerInputStream input stream's registered} frame
+ * format. Also sets the surface's {@linkplain
* android.graphics.SurfaceTexture#setDefaultBufferSize(int, int) default buffer size}.
*/
int INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION = 4;
@@ -131,8 +131,8 @@ public interface VideoFrameProcessor {
interface Listener {
/**
- * Called when the {@link VideoFrameProcessor} finishes {@linkplain #registerInputStream(int,
- * List, FrameInfo) registering an input stream}.
+ * Called when the {@link VideoFrameProcessor} finishes {@linkplain #registerInputStream
+ * registering an input stream}.
*
     * <p>The {@link VideoFrameProcessor} is now ready to accept new input {@linkplain
* VideoFrameProcessor#registerInputFrame frames}, {@linkplain
@@ -140,11 +140,11 @@ public interface VideoFrameProcessor {
* VideoFrameProcessor#queueInputTexture(int, long) textures}.
*
* @param inputType The {@link InputType} of the new input stream.
+ * @param format The {@link Format} of the new input stream.
* @param effects The list of {@link Effect effects} to apply to the new input stream.
- * @param frameInfo The {@link FrameInfo} of the new input stream.
*/
default void onInputStreamRegistered(
- @InputType int inputType, List effects, FrameInfo frameInfo) {}
+ @InputType int inputType, Format format, List effects) {}
/**
* Called when the output size changes.
@@ -196,8 +196,8 @@ public interface VideoFrameProcessor {
/**
* Provides an input {@link Bitmap} to the {@link VideoFrameProcessor}.
*
-   * <p>Can be called many times after {@link #registerInputStream(int, List, FrameInfo) registering
-   * the input stream} to put multiple frames in the same input stream.
+   * <p>Can be called many times after {@link #registerInputStream registering the input stream} to
+ * put multiple frames in the same input stream.
*
* @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param timestampIterator A {@link TimestampIterator} generating the exact timestamps that the
@@ -271,14 +271,20 @@ public interface VideoFrameProcessor {
* #queueInputTexture queued}.
*
   * <p>This method blocks the calling thread until the previous calls to this method finish, that
- * is when {@link Listener#onInputStreamRegistered(int, List, FrameInfo)} is called after the
+ * is when {@link Listener#onInputStreamRegistered(int, Format, List)} is called after the
* underlying processing pipeline has been adapted to the registered input stream.
*
* @param inputType The {@link InputType} of the new input stream.
+ * @param format The {@link Format} of the new input stream. The {@link Format#colorInfo}, the
+ * {@link Format#width}, the {@link Format#height} and the {@link
+ * Format#pixelWidthHeightRatio} must be set.
* @param effects The list of {@link Effect effects} to apply to the new input stream.
- * @param frameInfo The {@link FrameInfo} of the new input stream.
+ * @param offsetToAddUs The offset that must be added to the frame presentation timestamps, in
+ * microseconds. This offset is not part of the input timestamps. It is added to the frame
+ * timestamps before processing, and is retained in the output timestamps.
*/
-  void registerInputStream(@InputType int inputType, List<Effect> effects, FrameInfo frameInfo);
+  void registerInputStream(
+      @InputType int inputType, Format format, List<Effect> effects, long offsetToAddUs);
/**
* Informs the {@code VideoFrameProcessor} that a frame will be queued to its {@linkplain
@@ -287,11 +293,10 @@ public interface VideoFrameProcessor {
* Must be called before rendering a frame to the input surface. The caller must not render
* frames to the {@linkplain #getInputSurface input surface} when {@code false} is returned.
*
- * @return Whether the input frame was successfully registered. If {@link
- * #registerInputStream(int, List, FrameInfo)} is called, this method returns {@code false}
- * until {@link Listener#onInputStreamRegistered(int, List, FrameInfo)} is called. Otherwise,
- * a return value of {@code false} indicates the {@code VideoFrameProcessor} is not ready to
- * accept input.
+ * @return Whether the input frame was successfully registered. If {@link #registerInputStream} is
+ * called, this method returns {@code false} until {@link
+ * Listener#onInputStreamRegistered(int, Format, List)} is called. Otherwise, a return value
+ * of {@code false} indicates the {@code VideoFrameProcessor} is not ready to accept input.
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
* @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link
diff --git a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorTest.java b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorTest.java
index 0eb11fc4d4..73acf5e216 100644
--- a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorTest.java
+++ b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorTest.java
@@ -30,7 +30,7 @@ import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
-import androidx.media3.common.FrameInfo;
+import androidx.media3.common.Format;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.ConditionVariable;
@@ -43,6 +43,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
import java.util.List;
+import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
@@ -90,8 +91,8 @@ public class DefaultVideoFrameProcessorTest {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
-                  List<Effect> effects,
-                  FrameInfo frameInfo) {
+                  Format format,
+                  List<Effect> effects) {
inputStreamRegisteredCountDownLatch.countDown();
}
@@ -115,9 +116,13 @@ public class DefaultVideoFrameProcessorTest {
});
defaultVideoFrameProcessor.registerInputStream(
VideoFrameProcessor.INPUT_TYPE_BITMAP,
+ new Format.Builder()
+ .setColorInfo(ColorInfo.SRGB_BT709_FULL)
+ .setWidth(100)
+ .setHeight(100)
+ .build(),
ImmutableList.of(),
- new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, /* width= */ 100, /* height= */ 100)
- .build());
+ /* offsetToAddUs= */ 0);
assertThat(defaultVideoFrameProcessor.getPendingInputFrameCount()).isEqualTo(0);
// Unblocks configuration.
@@ -141,10 +146,10 @@ public class DefaultVideoFrameProcessorTest {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
-                  List<Effect> effects,
-                  FrameInfo frameInfo) {
+                  Format format,
+                  List<Effect> effects) {
registeredInputStreamInfoWidths.add(
- new InputStreamInfo(inputType, effects, frameInfo));
+ new InputStreamInfo(inputType, format, effects));
countDownLatch.countDown();
}
@@ -157,21 +162,30 @@ public class DefaultVideoFrameProcessorTest {
InputStreamInfo stream1 =
new InputStreamInfo(
VideoFrameProcessor.INPUT_TYPE_BITMAP,
- ImmutableList.of(),
- new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, /* width= */ 100, /* height= */ 100)
- .build());
+ new Format.Builder()
+ .setColorInfo(ColorInfo.SRGB_BT709_FULL)
+ .setWidth(100)
+ .setHeight(100)
+ .build(),
+ ImmutableList.of());
InputStreamInfo stream2 =
new InputStreamInfo(
VideoFrameProcessor.INPUT_TYPE_BITMAP,
- ImmutableList.of(new Contrast(.5f)),
- new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, /* width= */ 200, /* height= */ 200)
- .build());
+ new Format.Builder()
+ .setColorInfo(ColorInfo.SRGB_BT709_FULL)
+ .setWidth(200)
+ .setHeight(200)
+ .build(),
+ ImmutableList.of(new Contrast(.5f)));
InputStreamInfo stream3 =
new InputStreamInfo(
VideoFrameProcessor.INPUT_TYPE_BITMAP,
- ImmutableList.of(),
- new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, /* width= */ 300, /* height= */ 300)
- .build());
+ new Format.Builder()
+ .setColorInfo(ColorInfo.SRGB_BT709_FULL)
+ .setWidth(300)
+ .setHeight(300)
+ .build(),
+ ImmutableList.of());
registerInputStream(defaultVideoFrameProcessor, stream1);
registerInputStream(defaultVideoFrameProcessor, stream2);
@@ -207,8 +221,8 @@ public class DefaultVideoFrameProcessorTest {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
-                  List<Effect> effects,
-                  FrameInfo frameInfo) {
+                  Format format,
+                  List<Effect> effects) {
inputStreamRegisteredCondition.open();
}
@@ -241,9 +255,13 @@ public class DefaultVideoFrameProcessorTest {
inputStreamRegisteredCondition.close();
defaultVideoFrameProcessor.registerInputStream(
VideoFrameProcessor.INPUT_TYPE_BITMAP,
+ new Format.Builder()
+ .setColorInfo(ColorInfo.SRGB_BT709_FULL)
+ .setWidth(bitmap1.getWidth())
+ .setHeight(bitmap1.getHeight())
+ .build(),
ImmutableList.of(),
- new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, bitmap1.getWidth(), bitmap1.getHeight())
- .build());
+ /* offsetToAddUs= */ 0);
inputStreamRegisteredCondition.block();
defaultVideoFrameProcessor.queueInputBitmap(
bitmap1, new ConstantRateTimestampIterator(C.MICROS_PER_SECOND, 30.f));
@@ -252,14 +270,18 @@ public class DefaultVideoFrameProcessorTest {
inputStreamRegisteredCondition.close();
defaultVideoFrameProcessor.registerInputStream(
VideoFrameProcessor.INPUT_TYPE_BITMAP,
+ new Format.Builder()
+ .setColorInfo(ColorInfo.SRGB_BT709_FULL)
+ .setWidth(bitmap2.getWidth())
+ .setHeight(bitmap2.getHeight())
+ .build(),
ImmutableList.of(
(GlEffect)
(context, useHdr) -> {
secondStreamConfigurationTimeMs.set(SystemClock.DEFAULT.elapsedRealtime());
return new PassthroughShaderProgram();
}),
- new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, bitmap2.getWidth(), bitmap2.getHeight())
- .build());
+ /* offsetToAddUs= */ 0);
inputStreamRegisteredCondition.block();
defaultVideoFrameProcessor.queueInputBitmap(
bitmap2, new ConstantRateTimestampIterator(C.MICROS_PER_SECOND, 30.f));
@@ -287,8 +309,8 @@ public class DefaultVideoFrameProcessorTest {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
-                  List<Effect> effects,
-                  FrameInfo frameInfo) {
+                  Format format,
+                  List<Effect> effects) {
inputStreamRegisteredCountDownLatch.countDown();
}
@@ -311,9 +333,13 @@ public class DefaultVideoFrameProcessorTest {
Bitmap bitmap = BitmapPixelTestUtil.readBitmap(ORIGINAL_PNG_ASSET_PATH);
defaultVideoFrameProcessor.registerInputStream(
VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION,
+ new Format.Builder()
+ .setColorInfo(ColorInfo.SRGB_BT709_FULL)
+ .setWidth(bitmap.getWidth())
+ .setHeight(bitmap.getHeight())
+ .build(),
/* effects= */ ImmutableList.of(),
- new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, bitmap.getWidth(), bitmap.getHeight())
- .build());
+ /* offsetToAddUs= */ 0);
inputStreamRegisteredCountDownLatch.await();
checkState(defaultVideoFrameProcessor.registerInputFrame());
@@ -355,19 +381,22 @@ public class DefaultVideoFrameProcessorTest {
private static void registerInputStream(
DefaultVideoFrameProcessor defaultVideoFrameProcessor, InputStreamInfo inputStreamInfo) {
defaultVideoFrameProcessor.registerInputStream(
- inputStreamInfo.inputType, inputStreamInfo.effects, inputStreamInfo.frameInfo);
+ inputStreamInfo.inputType,
+ inputStreamInfo.format,
+ inputStreamInfo.effects,
+ /* offsetToAddUs= */ 0);
}
private static final class InputStreamInfo {
public final @VideoFrameProcessor.InputType int inputType;
+ public final Format format;
     public final List<Effect> effects;
- public final FrameInfo frameInfo;
private InputStreamInfo(
-        @VideoFrameProcessor.InputType int inputType, List<Effect> effects, FrameInfo frameInfo) {
+        @VideoFrameProcessor.InputType int inputType, Format format, List<Effect> effects) {
this.inputType = inputType;
+ this.format = format;
this.effects = effects;
- this.frameInfo = frameInfo;
}
@Override
@@ -380,16 +409,16 @@ public class DefaultVideoFrameProcessorTest {
}
InputStreamInfo that = (InputStreamInfo) o;
return inputType == that.inputType
- && Util.areEqual(this.effects, that.effects)
- && Util.areEqual(this.frameInfo, that.frameInfo);
+ && Objects.equals(this.format, that.format)
+ && Objects.equals(this.effects, that.effects);
}
@Override
public int hashCode() {
int result = 17;
result = 31 * result + inputType;
+ result = 31 * result + format.hashCode();
result = 31 * result + effects.hashCode();
- result = 31 * result + frameInfo.hashCode();
return result;
}
}
diff --git a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorVideoFrameRenderingTest.java b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorVideoFrameRenderingTest.java
index a90768e23c..df3622a37d 100644
--- a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorVideoFrameRenderingTest.java
+++ b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorVideoFrameRenderingTest.java
@@ -28,7 +28,7 @@ import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
-import androidx.media3.common.FrameInfo;
+import androidx.media3.common.Format;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
@@ -267,8 +267,8 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
-                  List<Effect> effects,
-                  FrameInfo frameInfo) {
+                  Format format,
+                  List<Effect> effects) {
videoFrameProcessorReadyCountDownLatch.countDown();
}
@@ -315,8 +315,13 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
checkNotNull(defaultVideoFrameProcessor)
.registerInputStream(
INPUT_TYPE_SURFACE,
+ new Format.Builder()
+ .setColorInfo(ColorInfo.SDR_BT709_LIMITED)
+ .setWidth(WIDTH)
+ .setHeight(HEIGHT)
+ .build(),
/* effects= */ ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer),
- new FrameInfo.Builder(ColorInfo.SDR_BT709_LIMITED, WIDTH, HEIGHT).build());
+ /* offsetToAddUs= */ 0);
boolean testTimedOut = false;
if (!videoFrameProcessorReadyCountDownLatch.await(TEST_TIMEOUT_MS, MILLISECONDS)) {
testTimedOut = true;
diff --git a/libraries/effect/src/androidTest/java/androidx/media3/effect/EffectsTestUtil.java b/libraries/effect/src/androidTest/java/androidx/media3/effect/EffectsTestUtil.java
index 644a85e78e..f54f2df1de 100644
--- a/libraries/effect/src/androidTest/java/androidx/media3/effect/EffectsTestUtil.java
+++ b/libraries/effect/src/androidTest/java/androidx/media3/effect/EffectsTestUtil.java
@@ -31,7 +31,7 @@ import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
-import androidx.media3.common.FrameInfo;
+import androidx.media3.common.Format;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Consumer;
@@ -133,8 +133,8 @@ import java.util.concurrent.atomic.AtomicReference;
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
-                  List<Effect> effects,
-                  FrameInfo frameInfo) {
+                  Format format,
+                  List<Effect> effects) {
videoFrameProcessorReadyCountDownLatch.countDown();
}
@@ -162,6 +162,11 @@ import java.util.concurrent.atomic.AtomicReference;
checkNotNull(defaultVideoFrameProcessor)
.registerInputStream(
INPUT_TYPE_SURFACE,
+ new Format.Builder()
+ .setColorInfo(ColorInfo.SDR_BT709_LIMITED)
+ .setWidth(frameWidth)
+ .setHeight(frameHeight)
+ .build(),
/* effects= */ ImmutableList.of(
(GlEffect) (context, useHdr) -> blankFrameProducer,
// Use an overlay effect to generate bitmaps with timestamps on it.
@@ -177,7 +182,7 @@ import java.util.concurrent.atomic.AtomicReference;
}
})),
glEffect),
- new FrameInfo.Builder(ColorInfo.SDR_BT709_LIMITED, frameWidth, frameHeight).build());
+ /* offsetToAddUs= */ 0);
videoFrameProcessorReadyCountDownLatch.await();
checkNoVideoFrameProcessingExceptionIsThrown(videoFrameProcessingExceptionReference);
blankFrameProducer.produceBlankFrames(presentationTimesUs);
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/BitmapTextureManager.java b/libraries/effect/src/main/java/androidx/media3/effect/BitmapTextureManager.java
index 5ca81958bc..85e3674aa3 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/BitmapTextureManager.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/BitmapTextureManager.java
@@ -169,8 +169,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
EVENT_QUEUE_BITMAP,
currentPresentationTimeUs,
/* extraFormat= */ "%dx%d",
- /* extraArgs...= */ currentFrameInfo.width,
- currentFrameInfo.height);
+ /* extraArgs...= */ currentFrameInfo.format.width,
+ currentFrameInfo.format.height);
if (!currentBitmapInfo.inStreamOffsetsUs.hasNext()) {
isNextFrameInTexture = false;
@@ -216,8 +216,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
currentTexId,
/* fboId= */ C.INDEX_UNSET,
/* rboId= */ C.INDEX_UNSET,
- frameInfo.width,
- frameInfo.height);
+ frameInfo.format.width,
+ frameInfo.format.height);
if (Util.SDK_INT >= 34 && bitmap.hasGainmap()) {
checkNotNull(repeatingGainmapShaderProgram).setGainmap(checkNotNull(bitmap.getGainmap()));
}
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java b/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java
index 9e8425a64b..2dd3c0833f 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java
@@ -46,6 +46,7 @@ import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
+import androidx.media3.common.Format;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.MediaLibraryInfo;
@@ -201,16 +202,16 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
* change between input streams is handled frame-exactly. If {@code false}, {@link
* #registerInputFrame} can be called only once for each {@linkplain #registerInputStream
* registered input stream} before rendering the first frame to the input {@link
- * #getInputSurface() Surface}. The same registered {@link FrameInfo} is repeated for the
+ * #getInputSurface() Surface}. The same registered {@link Format} is repeated for the
* subsequent frames. To ensure the format change between input streams is applied on the
- * right frame, the caller needs to {@linkplain #registerInputStream(int, List, FrameInfo)
- * register} the new input stream strictly after rendering all frames from the previous input
- * stream. This mode should be used in streams where users don't have direct control over
- * rendering frames, like in a camera feed.
+ * right frame, the caller needs to {@linkplain #registerInputStream register} the new input
+ * stream strictly after rendering all frames from the previous input stream. This mode should
+ * be used in streams where users don't have direct control over rendering frames, like in a
+ * camera feed.
*
-   * <p>Regardless of the value set, {@link #registerInputStream(int, List, FrameInfo)} must be
-   * called for each input stream to specify the format for upcoming frames before calling
-   * {@link #registerInputFrame()}.
+   * <p>Regardless of the value set, {@link #registerInputStream} must be called for each input
+ * stream to specify the format for upcoming frames before calling {@link
+ * #registerInputFrame()}.
*
* @param requireRegisteringAllInputFrames Whether registering every input frame is required.
* @deprecated For automatic frame registration ({@code
@@ -457,8 +458,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private final ConditionVariable inputStreamRegisteredCondition;
/**
- * The input stream that is {@linkplain #registerInputStream(int, List, FrameInfo) registered},
- * but the pipeline has not adapted to processing it.
+ * The input stream that is {@linkplain #registerInputStream registered}, but the pipeline has not
+ * adapted to processing it.
*/
@GuardedBy("lock")
@Nullable
@@ -567,10 +568,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
FrameInfo frameInfo = checkNotNull(this.nextInputFrameInfo);
inputSwitcher
.activeTextureManager()
- .queueInputBitmap(
- inputBitmap,
- new FrameInfo.Builder(frameInfo).setOffsetToAddUs(frameInfo.offsetToAddUs).build(),
- timestampIterator);
+ .queueInputBitmap(inputBitmap, frameInfo, timestampIterator);
return true;
}
@@ -609,18 +607,18 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/**
* {@inheritDoc}
*
-   * <p>Using HDR {@link FrameInfo#colorInfo} requires OpenGL ES 3.0 and the {@code EXT_YUV_target}
+   * <p>Using HDR {@link Format#colorInfo} requires OpenGL ES 3.0 and the {@code EXT_YUV_target}
* OpenGL extension.
*
   * <p>{@link Effect}s are applied on {@link C#COLOR_RANGE_FULL} colors with {@code null} {@link
* ColorInfo#hdrStaticInfo}.
*
-   * <p>If either {@link FrameInfo#colorInfo} or {@code outputColorInfo} {@linkplain
+   * <p>If either {@link Format#colorInfo} or {@code outputColorInfo} {@linkplain
* ColorInfo#isTransferHdr} are HDR}, textures will use {@link GLES30#GL_RGBA16F} and {@link
* GLES30#GL_HALF_FLOAT}. Otherwise, textures will use {@link GLES20#GL_RGBA} and {@link
* GLES20#GL_UNSIGNED_BYTE}.
*
-   * <p>If {@linkplain FrameInfo#colorInfo input color} {@linkplain ColorInfo#isTransferHdr is HDR},
+   * <p>If {@linkplain Format#colorInfo input color} {@linkplain ColorInfo#isTransferHdr is HDR},
* but {@code outputColorInfo} is SDR, then HDR to SDR tone-mapping is applied, and {@code
* outputColorInfo}'s {@link ColorInfo#colorTransfer} must be {@link C#COLOR_TRANSFER_GAMMA_2_2}
* or {@link C#COLOR_TRANSFER_SDR}. In this case, the actual output transfer function will be in
@@ -630,18 +628,19 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
*/
@Override
public void registerInputStream(
-      @InputType int inputType, List<Effect> effects, FrameInfo frameInfo) {
+      @InputType int inputType, Format format, List<Effect> effects, long offsetToAddUs) {
// This method is only called after all samples in the current input stream are registered or
// queued.
DebugTraceUtil.logEvent(
COMPONENT_VFP,
EVENT_REGISTER_NEW_INPUT_STREAM,
- /* presentationTimeUs= */ frameInfo.offsetToAddUs,
+ /* presentationTimeUs= */ offsetToAddUs,
/* extraFormat= */ "InputType %s - %dx%d",
/* extraArgs...= */ getInputTypeString(inputType),
- frameInfo.width,
- frameInfo.height);
- nextInputFrameInfo = adjustForPixelWidthHeightRatio(frameInfo);
+ format.width,
+ format.height);
+ Format nextFormat = adjustForPixelWidthHeightRatio(format);
+ nextInputFrameInfo = new FrameInfo(nextFormat, offsetToAddUs);
try {
// Blocks until the previous input stream registration completes.
@@ -654,7 +653,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
synchronized (lock) {
// An input stream is pending until its effects are configured.
- InputStreamInfo pendingInputStreamInfo = new InputStreamInfo(inputType, effects, frameInfo);
+ InputStreamInfo pendingInputStreamInfo =
+ new InputStreamInfo(inputType, format, effects, offsetToAddUs);
if (!registeredFirstInputStream) {
registeredFirstInputStream = true;
inputStreamRegisteredCondition.close();
@@ -770,23 +770,24 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
}
/**
- * Expands the frame based on the {@link FrameInfo#pixelWidthHeightRatio} and returns a new {@link
- * FrameInfo} instance with scaled dimensions and {@link FrameInfo#pixelWidthHeightRatio} of
- * {@code 1}.
+ * Expands the frame based on the {@link Format#pixelWidthHeightRatio} and returns a new {@link
+ * Format} instance with scaled dimensions and {@link Format#pixelWidthHeightRatio} of {@code 1}.
*/
- private FrameInfo adjustForPixelWidthHeightRatio(FrameInfo frameInfo) {
- if (frameInfo.pixelWidthHeightRatio > 1f) {
- return new FrameInfo.Builder(frameInfo)
- .setWidth((int) (frameInfo.width * frameInfo.pixelWidthHeightRatio))
+ private Format adjustForPixelWidthHeightRatio(Format format) {
+ if (format.pixelWidthHeightRatio > 1f) {
+ return format
+ .buildUpon()
+ .setWidth((int) (format.width * format.pixelWidthHeightRatio))
.setPixelWidthHeightRatio(1)
.build();
- } else if (frameInfo.pixelWidthHeightRatio < 1f) {
- return new FrameInfo.Builder(frameInfo)
- .setHeight((int) (frameInfo.height / frameInfo.pixelWidthHeightRatio))
+ } else if (format.pixelWidthHeightRatio < 1f) {
+ return format
+ .buildUpon()
+ .setHeight((int) (format.height / format.pixelWidthHeightRatio))
.setPixelWidthHeightRatio(1)
.build();
} else {
- return frameInfo;
+ return format;
}
}
@@ -995,7 +996,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
*/
private void configureEffects(InputStreamInfo inputStreamInfo, boolean forceReconfigure)
throws VideoFrameProcessingException {
- checkColors(/* inputColorInfo= */ inputStreamInfo.frameInfo.colorInfo, outputColorInfo);
+ checkColors(
+ /* inputColorInfo= */ checkNotNull(inputStreamInfo.format.colorInfo), outputColorInfo);
if (forceReconfigure || !activeEffects.equals(inputStreamInfo.effects)) {
if (!intermediateGlShaderPrograms.isEmpty()) {
for (int i = 0; i < intermediateGlShaderPrograms.size(); i++) {
@@ -1023,7 +1025,9 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
activeEffects.addAll(inputStreamInfo.effects);
}
- inputSwitcher.switchToInput(inputStreamInfo.inputType, inputStreamInfo.frameInfo);
+ inputSwitcher.switchToInput(
+ inputStreamInfo.inputType,
+ new FrameInfo(inputStreamInfo.format, inputStreamInfo.offsetToAddUs));
inputStreamRegisteredCondition.open();
synchronized (lock) {
if (onInputSurfaceReadyListener != null) {
@@ -1034,7 +1038,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
listenerExecutor.execute(
() ->
listener.onInputStreamRegistered(
- inputStreamInfo.inputType, inputStreamInfo.effects, inputStreamInfo.frameInfo));
+ inputStreamInfo.inputType, inputStreamInfo.format, inputStreamInfo.effects));
}
/** Checks that color configuration is valid for {@link DefaultVideoFrameProcessor}. */
@@ -1151,13 +1155,16 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private static final class InputStreamInfo {
public final @InputType int inputType;
+ public final Format format;
public final List<Effect> effects;
- public final FrameInfo frameInfo;
+ public final long offsetToAddUs;
- public InputStreamInfo(@InputType int inputType, List<Effect> effects, FrameInfo frameInfo) {
+ public InputStreamInfo(
@InputType int inputType, Format format, List<Effect> effects, long offsetToAddUs) {
this.inputType = inputType;
+ this.format = format;
this.effects = effects;
- this.frameInfo = frameInfo;
+ this.offsetToAddUs = offsetToAddUs;
}
}
}
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/ExternalTextureManager.java b/libraries/effect/src/main/java/androidx/media3/effect/ExternalTextureManager.java
index c67eaa659e..739d7b9986 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/ExternalTextureManager.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/ExternalTextureManager.java
@@ -230,7 +230,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
repeatLastRegisteredFrame = automaticReregistration;
if (repeatLastRegisteredFrame) {
lastRegisteredFrame = inputFrameInfo;
- surfaceTexture.setDefaultBufferSize(inputFrameInfo.width, inputFrameInfo.height);
+ surfaceTexture.setDefaultBufferSize(
+ inputFrameInfo.format.width, inputFrameInfo.format.height);
}
}
@@ -407,7 +408,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
long presentationTimeUs = (frameTimeNs / 1000) + offsetToAddUs;
if (experimentalAdjustSurfaceTextureTransformationMatrix) {
removeSurfaceTextureScaleFromTransformMatrix(
- textureTransformMatrix, presentationTimeUs, currentFrame.width, currentFrame.height);
+ textureTransformMatrix,
+ presentationTimeUs,
+ currentFrame.format.width,
+ currentFrame.format.height);
}
checkNotNull(externalShaderProgram).setTextureTransformMatrix(textureTransformMatrix);
@@ -418,8 +422,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
externalTexId,
/* fboId= */ C.INDEX_UNSET,
/* rboId= */ C.INDEX_UNSET,
- currentFrame.width,
- currentFrame.height),
+ currentFrame.format.width,
+ currentFrame.format.height),
presentationTimeUs);
if (!repeatLastRegisteredFrame) {
checkStateNotNull(pendingFrames.remove());
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/InputSwitcher.java b/libraries/effect/src/main/java/androidx/media3/effect/InputSwitcher.java
index 82dd41ebac..628e3480f0 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/InputSwitcher.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/InputSwitcher.java
@@ -160,11 +160,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// Activate the relevant input for the new input type.
Input input = inputs.get(newInputType);
- if (input.getInputColorInfo() == null
- || !newInputFrameInfo.colorInfo.equals(input.getInputColorInfo())) {
+ ColorInfo newInputColorInfo = checkNotNull(newInputFrameInfo.format.colorInfo);
+ if (input.getInputColorInfo() == null || !newInputColorInfo.equals(input.getInputColorInfo())) {
input.setSamplingGlShaderProgram(
- createSamplingShaderProgram(newInputFrameInfo.colorInfo, newInputType));
- input.setInputColorInfo(newInputFrameInfo.colorInfo);
+ createSamplingShaderProgram(newInputColorInfo, newInputType));
+ input.setInputColorInfo(newInputColorInfo);
}
input.setChainingListener(
new GatedChainingListenerWrapper(
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/MultipleInputVideoGraph.java b/libraries/effect/src/main/java/androidx/media3/effect/MultipleInputVideoGraph.java
index 8d4b696dc9..a97087522c 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/MultipleInputVideoGraph.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/MultipleInputVideoGraph.java
@@ -41,7 +41,7 @@ import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
-import androidx.media3.common.FrameInfo;
+import androidx.media3.common.Format;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.SurfaceInfo;
@@ -165,8 +165,8 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
- List<Effect> effects,
- FrameInfo frameInfo) {
+ Format format,
+ List<Effect> effects) {
compositionVideoFrameProcessorInputStreamRegistrationCompleted = true;
queueCompositionOutputInternal();
}
@@ -249,7 +249,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
listenerExecutor,
new VideoFrameProcessor.Listener() {
// All of this listener's methods are called on the sharedExecutorService.
-
@Override
public void onError(VideoFrameProcessingException exception) {
handleVideoFrameProcessingException(exception);
@@ -367,12 +366,16 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
checkNotNull(compositionVideoFrameProcessor)
.registerInputStream(
INPUT_TYPE_TEXTURE_ID,
- compositionEffects,
// Pre-processing VideoFrameProcessors have converted the inputColor to outputColor
// already, so use outputColorInfo for the input color to the
// compositionVideoFrameProcessor.
- new FrameInfo.Builder(outputColorInfo, outputTexture.width, outputTexture.height)
- .build());
+ new Format.Builder()
+ .setColorInfo(outputColorInfo)
+ .setWidth(outputTexture.width)
+ .setHeight(outputTexture.height)
+ .build(),
+ compositionEffects,
+ /* offsetToAddUs= */ 0);
compositionVideoFrameProcessorInputStreamRegistered = true;
// Return as the VideoFrameProcessor rejects input textures until the input is registered.
return;
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/TexIdTextureManager.java b/libraries/effect/src/main/java/androidx/media3/effect/TexIdTextureManager.java
index e029660ad5..60c1576b8e 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/TexIdTextureManager.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/TexIdTextureManager.java
@@ -87,16 +87,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
inputTexId,
/* fboId= */ C.INDEX_UNSET,
/* rboId= */ C.INDEX_UNSET,
- frameInfo.width,
- frameInfo.height);
+ frameInfo.format.width,
+ frameInfo.format.height);
checkNotNull(frameConsumptionManager).queueInputFrame(inputTexture, presentationTimeUs);
DebugTraceUtil.logEvent(
COMPONENT_VFP,
EVENT_QUEUE_TEXTURE,
presentationTimeUs,
/* extraFormat= */ "%dx%d",
- /* extraArgs...= */ frameInfo.width,
- frameInfo.height);
+ /* extraArgs...= */ frameInfo.format.width,
+ frameInfo.format.height);
});
}
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/TextureManager.java b/libraries/effect/src/main/java/androidx/media3/effect/TextureManager.java
index 3676339f35..3b4a20dc55 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/TextureManager.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/TextureManager.java
@@ -22,6 +22,7 @@ import android.graphics.SurfaceTexture;
import android.view.Surface;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
+import androidx.media3.common.Format;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.VideoFrameProcessingException;
@@ -111,7 +112,7 @@ import androidx.media3.common.util.TimestampIterator;
* frames to be registered, it may use the {@link FrameInfo} passed to {@link
* #registerInputFrame(FrameInfo)} instead of the one passed here.
*
- * Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output
+ * <p>Pixels are expanded using the {@link Format#pixelWidthHeightRatio} so that the output
* frames' pixels have a ratio of 1.
*
* @param inputFrameInfo Information about the next input frame.
diff --git a/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/PlaybackVideoGraphWrapper.java b/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/PlaybackVideoGraphWrapper.java
index 4adadc6e43..f99eb74a97 100644
--- a/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/PlaybackVideoGraphWrapper.java
+++ b/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/PlaybackVideoGraphWrapper.java
@@ -36,7 +36,6 @@ import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
-import androidx.media3.common.FrameInfo;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SurfaceInfo;
@@ -859,16 +858,13 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
ArrayList<Effect> effects = new ArrayList<>(videoEffects);
Format inputFormat = checkNotNull(this.inputFormat);
+ inputFormat =
+ inputFormat
+ .buildUpon()
+ .setColorInfo(getAdjustedInputColorInfo(inputFormat.colorInfo))
+ .build();
checkStateNotNull(videoFrameProcessor)
- .registerInputStream(
- inputType,
- effects,
- new FrameInfo.Builder(
- getAdjustedInputColorInfo(inputFormat.colorInfo),
- inputFormat.width,
- inputFormat.height)
- .setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
- .build());
+ .registerInputStream(inputType, inputFormat, effects, /* offsetToAddUs= */ 0);
finalBufferPresentationTimeUs = C.TIME_UNSET;
}
diff --git a/libraries/test_utils/src/main/java/androidx/media3/test/utils/VideoFrameProcessorTestRunner.java b/libraries/test_utils/src/main/java/androidx/media3/test/utils/VideoFrameProcessorTestRunner.java
index e351f5e0c8..3bb273faed 100644
--- a/libraries/test_utils/src/main/java/androidx/media3/test/utils/VideoFrameProcessorTestRunner.java
+++ b/libraries/test_utils/src/main/java/androidx/media3/test/utils/VideoFrameProcessorTestRunner.java
@@ -38,7 +38,7 @@ import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
-import androidx.media3.common.FrameInfo;
+import androidx.media3.common.Format;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
@@ -283,8 +283,8 @@ public final class VideoFrameProcessorTestRunner {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
- List<Effect> effects,
- FrameInfo frameInfo) {
+ Format format,
+ List<Effect> effects) {
videoFrameProcessorReadyCondition.open();
}
@@ -338,13 +338,14 @@ public final class VideoFrameProcessorTestRunner {
@Nullable ColorInfo colorInfo = MediaFormatUtil.getColorInfo(mediaFormat);
videoFrameProcessor.registerInputStream(
INPUT_TYPE_SURFACE,
- effects,
- new FrameInfo.Builder(
- colorInfo == null ? ColorInfo.SDR_BT709_LIMITED : colorInfo,
- mediaFormat.getInteger(MediaFormat.KEY_WIDTH),
- mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
+ new Format.Builder()
+ .setColorInfo(colorInfo == null ? ColorInfo.SDR_BT709_LIMITED : colorInfo)
+ .setWidth(mediaFormat.getInteger(MediaFormat.KEY_WIDTH))
+ .setHeight(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
- .build());
+ .build(),
+ effects,
+ /* offsetToAddUs= */ 0);
try {
awaitVideoFrameProcessorReady();
} catch (VideoFrameProcessingException e) {
@@ -374,11 +375,14 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessorReadyCondition.close();
videoFrameProcessor.registerInputStream(
INPUT_TYPE_BITMAP,
- effects,
- new FrameInfo.Builder(colorInfo, inputBitmap.getWidth(), inputBitmap.getHeight())
+ new Format.Builder()
+ .setColorInfo(colorInfo)
+ .setWidth(inputBitmap.getWidth())
+ .setHeight(inputBitmap.getHeight())
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
- .setOffsetToAddUs(offsetToAddUs)
- .build());
+ .build(),
+ effects,
+ offsetToAddUs);
awaitVideoFrameProcessorReady();
checkState(
videoFrameProcessor.queueInputBitmap(
@@ -396,10 +400,14 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessorReadyCondition.close();
videoFrameProcessor.registerInputStream(
INPUT_TYPE_BITMAP,
- effects,
- new FrameInfo.Builder(colorInfo, width, height)
+ new Format.Builder()
+ .setColorInfo(colorInfo)
+ .setWidth(width)
+ .setHeight(height)
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
- .build());
+ .build(),
+ effects,
+ /* offsetToAddUs= */ 0);
awaitVideoFrameProcessorReady();
for (Pair<Bitmap, TimestampIterator> frame : frames) {
videoFrameProcessor.queueInputBitmap(frame.first, frame.second);
@@ -410,10 +418,14 @@ public final class VideoFrameProcessorTestRunner {
throws VideoFrameProcessingException {
videoFrameProcessor.registerInputStream(
INPUT_TYPE_TEXTURE_ID,
- effects,
- new FrameInfo.Builder(colorInfo, inputTexture.width, inputTexture.height)
+ new Format.Builder()
+ .setColorInfo(colorInfo)
+ .setWidth(inputTexture.width)
+ .setHeight(inputTexture.height)
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
- .build());
+ .build(),
+ effects,
+ /* offsetToAddUs= */ 0);
videoFrameProcessor.setOnInputFrameProcessedListener(
(texId, syncObject) -> {
try {
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java
index b3a8b26f77..c033acbad3 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java
@@ -27,12 +27,10 @@ import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
-import androidx.media3.common.FrameInfo;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.VideoFrameProcessor;
-import androidx.media3.common.util.Size;
import androidx.media3.common.util.TimestampIterator;
import com.google.common.collect.ImmutableList;
import java.util.List;
@@ -65,7 +63,7 @@ import java.util.concurrent.atomic.AtomicLong;
boolean isSurfaceAssetLoaderMediaItem = isMediaItemForSurfaceAssetLoader(editedMediaItem);
durationUs = editedMediaItem.getDurationAfterEffectsApplied(durationUs);
if (decodedFormat != null) {
- Size decodedSize = getDecodedSize(decodedFormat);
+ decodedFormat = applyDecoderRotation(decodedFormat);
ImmutableList<Effect> combinedEffects =
new ImmutableList.Builder<Effect>()
.addAll(editedMediaItem.effects.videoEffects)
@@ -75,14 +73,9 @@ import java.util.concurrent.atomic.AtomicLong;
isSurfaceAssetLoaderMediaItem
? VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION
: getInputTypeForMimeType(checkNotNull(decodedFormat.sampleMimeType)),
+ decodedFormat,
combinedEffects,
- new FrameInfo.Builder(
- checkNotNull(decodedFormat.colorInfo),
- decodedSize.getWidth(),
- decodedSize.getHeight())
- .setPixelWidthHeightRatio(decodedFormat.pixelWidthHeightRatio)
- .setOffsetToAddUs(initialTimestampOffsetUs + mediaItemOffsetUs.get())
- .build());
+ /* offsetToAddUs= */ initialTimestampOffsetUs + mediaItemOffsetUs.get());
}
mediaItemOffsetUs.addAndGet(durationUs);
}
@@ -136,11 +129,17 @@ import java.util.concurrent.atomic.AtomicLong;
videoFrameProcessor.release();
}
- private static Size getDecodedSize(Format format) {
- // The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
- int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height;
- int decodedHeight = (format.rotationDegrees % 180 == 0) ? format.height : format.width;
- return new Size(decodedWidth, decodedHeight);
+ private static Format applyDecoderRotation(Format format) {
+ // The decoder rotates encoded frames for display by format.rotationDegrees.
+ if (format.rotationDegrees % 180 == 0) {
+ return format;
+ }
+ return format
+ .buildUpon()
+ .setWidth(format.height)
+ .setHeight(format.width)
+ .setRotationDegrees(0)
+ .build();
}
private static @VideoFrameProcessor.InputType int getInputTypeForMimeType(String sampleMimeType) {