Remove setters for streamOffset.

In addition to the changes in 3a5c4277a7

This change essentially reverts 30e5bc9837 (Merged Jul 2022).

From this CL on, `VideoFrameProcessor` takes in non-offset, monotonically
increasing timestamps. For example, with one 5s and one 10s video,

- `VideoFrameProcessor`'s input should start from 0
- On switching to the second video (10s), the timestamp of the first frame in
  the second video should be at 5s.

In ExoPlayer however, `streamOffset` is managed differently and thus needs
correction before sending the frames to `VideoFrameProcessor`:
- The timestamps of the first video are offset by a large integer value, so the
  first frame of the first media item has timestamp (for example) 10000000000000000
- The last frame of the first media item has 10000005000000000
- At this point the stream offset is updated to 10000005000000000
- The pts of the first frame of the second video starts from 0 again.

PiperOrigin-RevId: 523444236
This commit is contained in:
claincly 2023-04-11 18:40:43 +01:00 committed by Rohit Singh
parent acdb7ee921
commit 32be985c7c
8 changed files with 38 additions and 95 deletions

View file

@ -30,7 +30,6 @@ public class FrameInfo {
private int width;
private int height;
private float pixelWidthHeightRatio;
private long streamOffsetUs;
private long offsetToAddUs;
/**
@ -50,7 +49,6 @@ public class FrameInfo {
width = frameInfo.width;
height = frameInfo.height;
pixelWidthHeightRatio = frameInfo.pixelWidthHeightRatio;
streamOffsetUs = frameInfo.streamOffsetUs;
offsetToAddUs = frameInfo.offsetToAddUs;
}
@ -79,17 +77,6 @@ public class FrameInfo {
return this;
}
/**
* Sets the {@linkplain FrameInfo#streamOffsetUs stream offset}, in microseconds.
*
* <p>The default value is {@code 0}.
*/
@CanIgnoreReturnValue
public Builder setStreamOffsetUs(long streamOffsetUs) {
this.streamOffsetUs = streamOffsetUs;
return this;
}
/**
* Sets the {@linkplain FrameInfo#offsetToAddUs offset to add} to the frame presentation
* timestamp, in microseconds.
@ -104,7 +91,7 @@ public class FrameInfo {
/** Builds a {@link FrameInfo} instance. */
public FrameInfo build() {
return new FrameInfo(width, height, pixelWidthHeightRatio, streamOffsetUs, offsetToAddUs);
return new FrameInfo(width, height, pixelWidthHeightRatio, offsetToAddUs);
}
}
@ -114,16 +101,6 @@ public class FrameInfo {
public final int height;
/** The ratio of width over height for each pixel. */
public final float pixelWidthHeightRatio;
/**
* An offset in microseconds that is part of the input timestamps and should be ignored for
* processing but added back to the output timestamps.
*
* <p>The offset stays constant within a stream. If the first timestamp of the next stream is less
* than or equal to the last timestamp of the current stream (including the {@linkplain
* #offsetToAddUs} offset to add), the stream offset must be updated between the streams to ensure
* that the offset frame timestamps are always monotonically increasing.
*/
public final long streamOffsetUs;
/**
* The offset that must be added to the frame presentation timestamp, in microseconds.
*
@ -134,15 +111,13 @@ public class FrameInfo {
// TODO(b/227624622): Add color space information for HDR.
private FrameInfo(
int width, int height, float pixelWidthHeightRatio, long streamOffsetUs, long offsetToAddUs) {
private FrameInfo(int width, int height, float pixelWidthHeightRatio, long offsetToAddUs) {
checkArgument(width > 0, "width must be positive, but is: " + width);
checkArgument(height > 0, "height must be positive, but is: " + height);
this.width = width;
this.height = height;
this.pixelWidthHeightRatio = pixelWidthHeightRatio;
this.streamOffsetUs = streamOffsetUs;
this.offsetToAddUs = offsetToAddUs;
}
}

View file

@ -172,10 +172,6 @@ public interface VideoFrameProcessor {
* <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output
* frames' pixels have a ratio of 1.
*
* <p>The caller should update {@link FrameInfo#streamOffsetUs} when switching to an input stream
* whose first frame timestamp is less than or equal to the last timestamp received. This stream
* offset should ensure that frame timestamps are monotonically increasing.
*
* <p>Can be called on any thread.
*/
void setInputFrameInfo(FrameInfo inputFrameInfo);

View file

@ -258,12 +258,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private final FinalShaderProgramWrapper finalShaderProgramWrapper;
private final ImmutableList<GlShaderProgram> allShaderPrograms;
/**
* Offset compared to original media presentation time that has been added to incoming frame
* timestamps, in microseconds.
*/
private long previousStreamOffsetUs;
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded;
@ -300,7 +294,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
finalShaderProgramWrapper = (FinalShaderProgramWrapper) getLast(shaderPrograms);
allShaderPrograms = shaderPrograms;
previousStreamOffsetUs = C.TIME_UNSET;
}
/** Returns the task executor that runs video frame processing tasks. */
@ -342,11 +335,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) {
nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo);
if (nextInputFrameInfo.streamOffsetUs != previousStreamOffsetUs) {
finalShaderProgramWrapper.appendStream(nextInputFrameInfo.streamOffsetUs);
previousStreamOffsetUs = nextInputFrameInfo.streamOffsetUs;
}
}
@Override

View file

@ -63,8 +63,6 @@ import java.util.concurrent.atomic.AtomicInteger;
// TODO(b/238302341) Remove the use of after flush task, block the calling thread instead.
@Nullable private volatile VideoFrameProcessingTask onFlushCompleteTask;
private long previousStreamOffsetUs;
/**
* Creates a new instance.
*
@ -90,7 +88,6 @@ import java.util.concurrent.atomic.AtomicInteger;
textureTransformMatrix = new float[16];
pendingFrames = new ConcurrentLinkedQueue<>();
externalShaderProgramInputCapacity = new AtomicInteger();
previousStreamOffsetUs = C.TIME_UNSET;
surfaceTexture.setOnFrameAvailableListener(
unused ->
videoFrameProcessingTaskExecutor.submit(
@ -225,15 +222,8 @@ import java.util.concurrent.atomic.AtomicInteger;
externalShaderProgram.setTextureTransformMatrix(textureTransformMatrix);
long frameTimeNs = surfaceTexture.getTimestamp();
long offsetToAddUs = currentFrame.offsetToAddUs;
long streamOffsetUs = currentFrame.streamOffsetUs;
if (streamOffsetUs != previousStreamOffsetUs) {
if (previousStreamOffsetUs != C.TIME_UNSET) {
externalShaderProgram.signalEndOfCurrentInputStream();
}
previousStreamOffsetUs = streamOffsetUs;
}
// Correct the presentation time so that GlShaderPrograms don't see the stream offset.
long presentationTimeUs = (frameTimeNs / 1000) + offsetToAddUs - streamOffsetUs;
long presentationTimeUs = (frameTimeNs / 1000) + offsetToAddUs;
externalShaderProgram.queueInputFrame(
new GlTextureInfo(
externalTexId,

View file

@ -17,7 +17,6 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import android.content.Context;
import android.opengl.EGL14;
@ -81,7 +80,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final Executor videoFrameProcessorListenerExecutor;
private final VideoFrameProcessor.Listener videoFrameProcessorListener;
private final float[] textureTransformMatrix;
private final Queue<Long> streamOffsetUsQueue;
private final Queue<Pair<GlTextureInfo, Long>> availableFrames;
@Nullable private final DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener;
@ -141,7 +139,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.textureOutputListener = textureOutputListener;
textureTransformMatrix = GlUtil.create4x4IdentityMatrix();
streamOffsetUsQueue = new ConcurrentLinkedQueue<>();
inputListener = new InputListener() {};
availableFrames = new ConcurrentLinkedQueue<>();
}
@ -175,27 +172,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void signalEndOfCurrentInputStream() {
frameProcessingStarted = true;
if (streamOffsetUsQueue.isEmpty()) {
// No input stream to end.
return;
}
streamOffsetUsQueue.remove();
if (streamOffsetUsQueue.isEmpty()) {
videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
}
}
/**
* Signals that there will be another input stream after all previously appended input streams
* have {@linkplain #signalEndOfCurrentInputStream() ended}.
*
* <p>This method does not need to be called on the GL thread, but the caller must ensure that
* stream offsets are appended in the correct order.
*
* @param streamOffsetUs The presentation timestamp offset, in microseconds.
*/
public void appendStream(long streamOffsetUs) {
streamOffsetUsQueue.add(streamOffsetUs);
videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
}
// Methods that must be called on the GL thread.
@ -203,14 +180,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
frameProcessingStarted = true;
long streamOffsetUs =
checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified.");
long offsetPresentationTimeUs = presentationTimeUs + streamOffsetUs;
videoFrameProcessorListenerExecutor.execute(
() -> videoFrameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs));
() -> videoFrameProcessorListener.onOutputFrameAvailable(presentationTimeUs));
if (releaseFramesAutomatically) {
renderFrame(
inputTexture, presentationTimeUs, /* releaseTimeNs= */ offsetPresentationTimeUs * 1000);
renderFrame(inputTexture, presentationTimeUs, /* releaseTimeNs= */ presentationTimeUs * 1000);
} else {
availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
}

View file

@ -181,9 +181,7 @@ public interface GlShaderProgram {
* Notifies the {@code GlShaderProgram} that no further input frames belonging to the current
* input stream will be queued.
*
* <p>Input frames that are queued after this method is called belong to a different input stream,
* so presentation timestamps may reset to start from a smaller presentation timestamp than the
* last frame of the previous input stream.
* <p>Input frames that are queued after this method is called belong to a different input stream.
*/
void signalEndOfCurrentInputStream();

View file

@ -54,6 +54,7 @@ import java.util.concurrent.Executor;
@Override
public void setInputListener(InputListener inputListener) {
// TODO(b/277726418) Fix over-reported input capacity.
copyGlShaderProgram.setInputListener(inputListener);
wrappedGlShaderProgram.setInputListener(inputListener);
}
@ -78,6 +79,8 @@ import java.util.concurrent.Executor;
@Override
public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
// TODO(b/277726418) Properly report shader program capacity when switching from wrapped shader
// program to copying shader program.
if (presentationTimeUs >= startTimeUs && presentationTimeUs <= endTimeUs) {
pendingWrappedGlShaderProgramFrames++;
wrappedGlShaderProgram.queueInputFrame(inputTexture, presentationTimeUs);
@ -102,7 +105,10 @@ import java.util.concurrent.Executor;
@Override
public void signalEndOfCurrentInputStream() {
copyGlShaderProgram.signalEndOfCurrentInputStream();
// TODO(b/277726418) Properly handle EOS reporting.
// Only sending EOS signal along the wrapped GL shader program path is semantically incorrect,
// but it ensures the wrapped shader program receives the EOS signal. On the other hand, the
// copy shader program does not need special EOS handling.
wrappedGlShaderProgram.signalEndOfCurrentInputStream();
}

View file

@ -967,7 +967,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override
protected void onReadyToInitializeCodec(Format format) throws ExoPlaybackException {
if (!videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.maybeEnable(format);
videoFrameProcessorManager.maybeEnable(format, getOutputStreamOffsetUs());
}
}
@ -1530,7 +1530,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// input surface, which is not a SurfaceView.
long releaseTimeNs =
videoFrameProcessorManager.isEnabled()
? (presentationTimeUs + getOutputStreamOffsetUs()) * 1000
? videoFrameProcessorManager.getCorrectedFramePresentationTimeUs(
presentationTimeUs, getOutputStreamOffsetUs())
* 1000
: System.nanoTime();
if (notifyFrameMetadataListener) {
notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format);
@ -1893,6 +1895,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/** The presentation time, after which the listener should be notified about the size change. */
private long pendingOutputSizeChangeNotificationTimeUs;
private long initialStreamOffsetUs;
/** Creates a new instance. */
public VideoFrameProcessorManager(
VideoFrameReleaseHelper frameReleaseHelper,
@ -1906,6 +1910,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
lastCodecBufferPresentationTimestampUs = C.TIME_UNSET;
processedFrameSize = VideoSize.UNKNOWN;
pendingOutputSizeChangeNotificationTimeUs = C.TIME_UNSET;
initialStreamOffsetUs = C.TIME_UNSET;
}
/** Sets the {@linkplain Effect video effects}. */
@ -1965,7 +1970,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* @throws ExoPlaybackException When enabling the {@link VideoFrameProcessor} failed.
*/
@CanIgnoreReturnValue
public boolean maybeEnable(Format inputFormat) throws ExoPlaybackException {
public boolean maybeEnable(Format inputFormat, long initialStreamOffsetUs)
throws ExoPlaybackException {
checkState(!isEnabled());
if (!canEnableFrameProcessing) {
return false;
@ -2061,6 +2067,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
throw new IllegalStateException();
}
});
this.initialStreamOffsetUs = initialStreamOffsetUs;
} catch (Exception e) {
throw renderer.createRendererException(
e, inputFormat, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
@ -2079,6 +2086,18 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return true;
}
public long getCorrectedFramePresentationTimeUs(
long framePresentationTimeUs, long currentStreamOffsetUs) {
// VideoFrameProcessor takes in frames with monotonically increasing, non-offset frame
// timestamps. That is, with two ten-second long videos, the first frame of the second video
// should bear a timestamp of 10s seen from VideoFrameProcessor; while in ExoPlayer, the
// timestamp of the said frame would be 0s, but the streamOffset is incremented 10s to include
// the duration of the first video. Thus this correction is needed to account for the different
// handling of presentation timestamps in ExoPlayer and VideoFrameProcessor.
checkState(initialStreamOffsetUs != C.TIME_UNSET);
return framePresentationTimeUs + currentStreamOffsetUs - initialStreamOffsetUs;
}
/**
* Returns the {@linkplain VideoFrameProcessor#getInputSurface input surface} of the {@link
* VideoFrameProcessor}.
@ -2133,7 +2152,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
.setInputFrameInfo(
new FrameInfo.Builder(inputFormat.width, inputFormat.height)
.setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
.setStreamOffsetUs(renderer.getOutputStreamOffsetUs())
.build());
this.inputFormat = inputFormat;
@ -2210,7 +2228,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
checkStateNotNull(videoFrameProcessor);
while (!processedFramesTimestampsUs.isEmpty()) {
boolean isStarted = renderer.getState() == STATE_STARTED;
long bufferPresentationTimeUs = checkNotNull(processedFramesTimestampsUs.peek());
long framePresentationTimeUs = checkNotNull(processedFramesTimestampsUs.peek());
long bufferPresentationTimeUs = framePresentationTimeUs + initialStreamOffsetUs;
long earlyUs =
renderer.calculateEarlyTimeUs(
positionUs,
@ -2252,8 +2271,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
&& bufferPresentationTimeUs > pendingFrameFormats.peek().first) {
currentFrameFormat = pendingFrameFormats.remove();
}
long framePresentationTimeUs =
bufferPresentationTimeUs - renderer.getOutputStreamOffsetUs();
renderer.notifyFrameMetadataListener(
framePresentationTimeUs, adjustedFrameReleaseTimeNs, currentFrameFormat.second);
if (pendingOutputSizeChangeNotificationTimeUs >= bufferPresentationTimeUs) {