diff --git a/libraries/common/src/main/java/androidx/media3/common/FrameInfo.java b/libraries/common/src/main/java/androidx/media3/common/FrameInfo.java index 3c5201a1a4..2e3fdf91b9 100644 --- a/libraries/common/src/main/java/androidx/media3/common/FrameInfo.java +++ b/libraries/common/src/main/java/androidx/media3/common/FrameInfo.java @@ -30,7 +30,6 @@ public class FrameInfo { private int width; private int height; private float pixelWidthHeightRatio; - private long streamOffsetUs; private long offsetToAddUs; /** @@ -50,7 +49,6 @@ public class FrameInfo { width = frameInfo.width; height = frameInfo.height; pixelWidthHeightRatio = frameInfo.pixelWidthHeightRatio; - streamOffsetUs = frameInfo.streamOffsetUs; offsetToAddUs = frameInfo.offsetToAddUs; } @@ -79,17 +77,6 @@ public class FrameInfo { return this; } - /** - * Sets the {@linkplain FrameInfo#streamOffsetUs stream offset}, in microseconds. - * - *
The default value is {@code 0}. - */ - @CanIgnoreReturnValue - public Builder setStreamOffsetUs(long streamOffsetUs) { - this.streamOffsetUs = streamOffsetUs; - return this; - } - /** * Sets the {@linkplain FrameInfo#offsetToAddUs offset to add} to the frame presentation * timestamp, in microseconds. @@ -104,7 +91,7 @@ public class FrameInfo { /** Builds a {@link FrameInfo} instance. */ public FrameInfo build() { - return new FrameInfo(width, height, pixelWidthHeightRatio, streamOffsetUs, offsetToAddUs); + return new FrameInfo(width, height, pixelWidthHeightRatio, offsetToAddUs); } } @@ -114,16 +101,6 @@ public class FrameInfo { public final int height; /** The ratio of width over height for each pixel. */ public final float pixelWidthHeightRatio; - /** - * An offset in microseconds that is part of the input timestamps and should be ignored for - * processing but added back to the output timestamps. - * - *
The offset stays constant within a stream. If the first timestamp of the next stream is less - * than or equal to the last timestamp of the current stream (including the {@linkplain - * #offsetToAddUs} offset to add), the stream offset must be updated between the streams to ensure - * that the offset frame timestamps are always monotonically increasing. - */ - public final long streamOffsetUs; /** * The offset that must be added to the frame presentation timestamp, in microseconds. * @@ -134,15 +111,13 @@ public class FrameInfo { // TODO(b/227624622): Add color space information for HDR. - private FrameInfo( - int width, int height, float pixelWidthHeightRatio, long streamOffsetUs, long offsetToAddUs) { + private FrameInfo(int width, int height, float pixelWidthHeightRatio, long offsetToAddUs) { checkArgument(width > 0, "width must be positive, but is: " + width); checkArgument(height > 0, "height must be positive, but is: " + height); this.width = width; this.height = height; this.pixelWidthHeightRatio = pixelWidthHeightRatio; - this.streamOffsetUs = streamOffsetUs; this.offsetToAddUs = offsetToAddUs; } } diff --git a/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java b/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java index a13c89d45c..def1d57b05 100644 --- a/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java +++ b/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java @@ -172,10 +172,6 @@ public interface VideoFrameProcessor { *
Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output * frames' pixels have a ratio of 1. * - *
The caller should update {@link FrameInfo#streamOffsetUs} when switching to an input stream - * whose first frame timestamp is less than or equal to the last timestamp received. This stream - * offset should ensure that frame timestamps are monotonically increasing. - * *
Can be called on any thread.
*/
void setInputFrameInfo(FrameInfo inputFrameInfo);
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java b/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java
index d026bb5cc2..d8361e9706 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java
@@ -258,12 +258,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private final FinalShaderProgramWrapper finalShaderProgramWrapper;
private final ImmutableList This method does not need to be called on the GL thread, but the caller must ensure that
- * stream offsets are appended in the correct order.
- *
- * @param streamOffsetUs The presentation timestamp offset, in microseconds.
- */
- public void appendStream(long streamOffsetUs) {
- streamOffsetUsQueue.add(streamOffsetUs);
+ videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
}
// Methods that must be called on the GL thread.
@@ -203,14 +180,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
frameProcessingStarted = true;
- long streamOffsetUs =
- checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified.");
- long offsetPresentationTimeUs = presentationTimeUs + streamOffsetUs;
videoFrameProcessorListenerExecutor.execute(
- () -> videoFrameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs));
+ () -> videoFrameProcessorListener.onOutputFrameAvailable(presentationTimeUs));
if (releaseFramesAutomatically) {
- renderFrame(
- inputTexture, presentationTimeUs, /* releaseTimeNs= */ offsetPresentationTimeUs * 1000);
+ renderFrame(inputTexture, presentationTimeUs, /* releaseTimeNs= */ presentationTimeUs * 1000);
} else {
availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
}
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/GlShaderProgram.java b/libraries/effect/src/main/java/androidx/media3/effect/GlShaderProgram.java
index d47fa824df..9299c9fe6a 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/GlShaderProgram.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/GlShaderProgram.java
@@ -181,9 +181,7 @@ public interface GlShaderProgram {
* Notifies the {@code GlShaderProgram} that no further input frames belonging to the current
* input stream will be queued.
*
- * Input frames that are queued after this method is called belong to a different input stream,
- * so presentation timestamps may reset to start from a smaller presentation timestamp than the
- * last frame of the previous input stream.
+ * Input frames that are queued after this method is called belong to a different input stream.
*/
void signalEndOfCurrentInputStream();
diff --git a/libraries/effect/src/main/java/androidx/media3/effect/TimestampWrapperShaderProgram.java b/libraries/effect/src/main/java/androidx/media3/effect/TimestampWrapperShaderProgram.java
index 585b7f49c7..d7fb91dcd1 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/TimestampWrapperShaderProgram.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/TimestampWrapperShaderProgram.java
@@ -54,6 +54,7 @@ import java.util.concurrent.Executor;
@Override
public void setInputListener(InputListener inputListener) {
+ // TODO(b/277726418) Fix over-reported input capacity.
copyGlShaderProgram.setInputListener(inputListener);
wrappedGlShaderProgram.setInputListener(inputListener);
}
@@ -78,6 +79,8 @@ import java.util.concurrent.Executor;
@Override
public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
+ // TODO(b/277726418) Properly report shader program capacity when switching from wrapped shader
+ // program to copying shader program.
if (presentationTimeUs >= startTimeUs && presentationTimeUs <= endTimeUs) {
pendingWrappedGlShaderProgramFrames++;
wrappedGlShaderProgram.queueInputFrame(inputTexture, presentationTimeUs);
@@ -102,7 +105,10 @@ import java.util.concurrent.Executor;
@Override
public void signalEndOfCurrentInputStream() {
- copyGlShaderProgram.signalEndOfCurrentInputStream();
+ // TODO(b/277726418) Properly handle EOS reporting.
+ // Only sending EOS signal along the wrapped GL shader program path is semantically incorrect,
+ // but it ensures the wrapped shader program receives the EOS signal. On the other hand, the
+ // copy shader program does not need special EOS handling.
wrappedGlShaderProgram.signalEndOfCurrentInputStream();
}
diff --git a/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/MediaCodecVideoRenderer.java b/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/MediaCodecVideoRenderer.java
index 6932aeef3f..e6606429c2 100644
--- a/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/MediaCodecVideoRenderer.java
+++ b/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/MediaCodecVideoRenderer.java
@@ -967,7 +967,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override
protected void onReadyToInitializeCodec(Format format) throws ExoPlaybackException {
if (!videoFrameProcessorManager.isEnabled()) {
- videoFrameProcessorManager.maybeEnable(format);
+ videoFrameProcessorManager.maybeEnable(format, getOutputStreamOffsetUs());
}
}
@@ -1530,7 +1530,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// input surface, which is not a SurfaceView.
long releaseTimeNs =
videoFrameProcessorManager.isEnabled()
- ? (presentationTimeUs + getOutputStreamOffsetUs()) * 1000
+ ? videoFrameProcessorManager.getCorrectedFramePresentationTimeUs(
+ presentationTimeUs, getOutputStreamOffsetUs())
+ * 1000
: System.nanoTime();
if (notifyFrameMetadataListener) {
notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format);
@@ -1893,6 +1895,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/** The presentation time, after which the listener should be notified about the size change. */
private long pendingOutputSizeChangeNotificationTimeUs;
+ private long initialStreamOffsetUs;
+
/** Creates a new instance. */
public VideoFrameProcessorManager(
VideoFrameReleaseHelper frameReleaseHelper,
@@ -1906,6 +1910,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
lastCodecBufferPresentationTimestampUs = C.TIME_UNSET;
processedFrameSize = VideoSize.UNKNOWN;
pendingOutputSizeChangeNotificationTimeUs = C.TIME_UNSET;
+ initialStreamOffsetUs = C.TIME_UNSET;
}
/** Sets the {@linkplain Effect video effects}. */
@@ -1965,7 +1970,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* @throws ExoPlaybackException When enabling the {@link VideoFrameProcessor} failed.
*/
@CanIgnoreReturnValue
- public boolean maybeEnable(Format inputFormat) throws ExoPlaybackException {
+ public boolean maybeEnable(Format inputFormat, long initialStreamOffsetUs)
+ throws ExoPlaybackException {
checkState(!isEnabled());
if (!canEnableFrameProcessing) {
return false;
@@ -2061,6 +2067,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
throw new IllegalStateException();
}
});
+ this.initialStreamOffsetUs = initialStreamOffsetUs;
} catch (Exception e) {
throw renderer.createRendererException(
e, inputFormat, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
@@ -2079,6 +2086,18 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return true;
}
+ public long getCorrectedFramePresentationTimeUs(
+ long framePresentationTimeUs, long currentStreamOffsetUs) {
+ // VideoFrameProcessor takes in frames with monotonically increasing, non-offset frame
+ // timestamps. That is, with two ten-second long videos, the first frame of the second video
+ // should bear a timestamp of 10s seen from VideoFrameProcessor; while in ExoPlayer, the
+ // timestamp of the said frame would be 0s, but the streamOffset is incremented 10s to include
+      // the duration of the first video. Thus this correction is needed to correct for the different
+ // handling of presentation timestamps in ExoPlayer and VideoFrameProcessor.
+ checkState(initialStreamOffsetUs != C.TIME_UNSET);
+ return framePresentationTimeUs + currentStreamOffsetUs - initialStreamOffsetUs;
+ }
+
/**
* Returns the {@linkplain VideoFrameProcessor#getInputSurface input surface} of the {@link
* VideoFrameProcessor}.
@@ -2133,7 +2152,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
.setInputFrameInfo(
new FrameInfo.Builder(inputFormat.width, inputFormat.height)
.setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
- .setStreamOffsetUs(renderer.getOutputStreamOffsetUs())
.build());
this.inputFormat = inputFormat;
@@ -2210,7 +2228,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
checkStateNotNull(videoFrameProcessor);
while (!processedFramesTimestampsUs.isEmpty()) {
boolean isStarted = renderer.getState() == STATE_STARTED;
- long bufferPresentationTimeUs = checkNotNull(processedFramesTimestampsUs.peek());
+ long framePresentationTimeUs = checkNotNull(processedFramesTimestampsUs.peek());
+ long bufferPresentationTimeUs = framePresentationTimeUs + initialStreamOffsetUs;
long earlyUs =
renderer.calculateEarlyTimeUs(
positionUs,
@@ -2252,8 +2271,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
&& bufferPresentationTimeUs > pendingFrameFormats.peek().first) {
currentFrameFormat = pendingFrameFormats.remove();
}
- long framePresentationTimeUs =
- bufferPresentationTimeUs - renderer.getOutputStreamOffsetUs();
renderer.notifyFrameMetadataListener(
framePresentationTimeUs, adjustedFrameReleaseTimeNs, currentFrameFormat.second);
if (pendingOutputSizeChangeNotificationTimeUs >= bufferPresentationTimeUs) {