Unify timestamp handling

Before this change, the timestamps output from composition playback were offset
by the renderer offset. After this change, the offset is removed and the
timestamp behaviour converges with Transformer; that is, the timestamps of
video/image frames follow those of the composition. For example, with a
composition of two 10-second items where the first is clipped by 2s at the
start, the timestamp of the first frame of the second item will be 8s.

PiperOrigin-RevId: 641121358
This commit is contained in:
claincly 2024-06-06 21:34:16 -07:00 committed by Copybara-Service
parent ec0af5a7e0
commit 38a7229d96
8 changed files with 226 additions and 227 deletions

View file

@ -36,12 +36,12 @@ public interface VideoGraph {
void onOutputSizeChanged(int width, int height);
/**
* Called when an output frame with the given {@code presentationTimeUs} becomes available for
* rendering.
* Called when an output frame with the given {@code framePresentationTimeUs} becomes available
* for rendering.
*
* @param presentationTimeUs The presentation time of the frame, in microseconds.
* @param framePresentationTimeUs The presentation time of the frame, in microseconds.
*/
void onOutputFrameAvailableForRendering(long presentationTimeUs);
void onOutputFrameAvailableForRendering(long framePresentationTimeUs);
/**
* Called after the {@link VideoGraph} has rendered its final output frame.

View file

@ -213,6 +213,13 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
private int pendingFlushCount;
private @State int state;
/**
* Converts the buffer timestamp (the player position, with renderer offset) to the composition
* timestamp, in microseconds. The composition time starts from zero; add this adjustment to the
* buffer timestamp to get the composition time.
*/
private long bufferTimestampAdjustmentUs;
private CompositingVideoSinkProvider(Builder builder) {
context = builder.context;
videoSinkImpl = new VideoSinkImpl(context);
@ -302,12 +309,15 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
}
@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
public void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {
if (pendingFlushCount > 0) {
// Ignore available frames while the sink provider is flushing
return;
}
videoFrameRenderControl.onOutputFrameAvailableForRendering(presentationTimeUs);
// The frame presentation time is relative to the start of the composition and does not include
// the renderer offset.
videoFrameRenderControl.onOutputFrameAvailableForRendering(
framePresentationTimeUs - bufferTimestampAdjustmentUs);
}
@Override
@ -443,7 +453,9 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
videoFrameRenderControl.setPlaybackSpeed(speed);
}
private void onStreamOffsetChange(long bufferPresentationTimeUs, long streamOffsetUs) {
private void onStreamOffsetChange(
long bufferTimestampAdjustmentUs, long bufferPresentationTimeUs, long streamOffsetUs) {
this.bufferTimestampAdjustmentUs = bufferTimestampAdjustmentUs;
videoFrameRenderControl.onStreamOffsetChange(bufferPresentationTimeUs, streamOffsetUs);
}
@ -466,6 +478,7 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
@Nullable private Format inputFormat;
private @InputType int inputType;
private long inputStreamOffsetUs;
private long inputBufferTimestampAdjustmentUs;
private boolean pendingInputStreamOffsetChange;
/** The buffer presentation time, in microseconds, of the final frame in the stream. */
@ -562,7 +575,8 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
public boolean isEnded() {
return isInitialized()
&& finalBufferPresentationTimeUs != C.TIME_UNSET
&& CompositingVideoSinkProvider.this.hasReleasedFrame(finalBufferPresentationTimeUs);
&& CompositingVideoSinkProvider.this.hasReleasedFrame(
finalBufferPresentationTimeUs + inputBufferTimestampAdjustmentUs);
}
@Override
@ -643,9 +657,14 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
}
@Override
public void setStreamOffsetUs(long streamOffsetUs) {
pendingInputStreamOffsetChange = inputStreamOffsetUs != streamOffsetUs;
public void setStreamOffsetAndAdjustmentUs(
long streamOffsetUs, long bufferTimestampAdjustmentUs) {
// ORs because this method could be called multiple times on a stream offset change.
pendingInputStreamOffsetChange |=
inputStreamOffsetUs != streamOffsetUs
|| inputBufferTimestampAdjustmentUs != bufferTimestampAdjustmentUs;
inputStreamOffsetUs = streamOffsetUs;
inputBufferTimestampAdjustmentUs = bufferTimestampAdjustmentUs;
}
@Override
@ -688,13 +707,17 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
// timestamp of the said frame would be 0s, but the streamOffset is incremented by 10s to include
// the duration of the first video. Thus this correction is needed to account for the different
// handling of presentation timestamps in ExoPlayer and VideoFrameProcessor.
long bufferPresentationTimeUs = framePresentationTimeUs + inputStreamOffsetUs;
//
// inputBufferTimestampAdjustmentUs adjusts the frame presentation time (which is relative to
// the start of a composition) to the buffer timestamp (which is offset, and corresponds to the
// player position).
long bufferPresentationTimeUs = framePresentationTimeUs - inputBufferTimestampAdjustmentUs;
maybeSetStreamOffsetChange(bufferPresentationTimeUs);
lastBufferPresentationTimeUs = bufferPresentationTimeUs;
if (isLastFrame) {
finalBufferPresentationTimeUs = bufferPresentationTimeUs;
}
return bufferPresentationTimeUs * 1000;
return framePresentationTimeUs * 1000;
}
@Override
@ -705,18 +728,14 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
return false;
}
// The sink takes bitmaps with monotonically increasing, non-offset frame timestamps. Ensure
// the produced timestamps include the stream offset.
OffsetTimestampIterator offsetTimestampIterator =
new OffsetTimestampIterator(timestampIterator, inputStreamOffsetUs);
if (!checkStateNotNull(videoFrameProcessor)
.queueInputBitmap(inputBitmap, offsetTimestampIterator)) {
.queueInputBitmap(inputBitmap, timestampIterator)) {
return false;
}
// Create a copy of iterator because we need to take the next timestamp but we must not alter
// the state of the iterator.
TimestampIterator copyTimestampIterator = offsetTimestampIterator.copyOf();
TimestampIterator copyTimestampIterator = timestampIterator.copyOf();
long bufferPresentationTimeUs = copyTimestampIterator.next();
long lastBufferPresentationTimeUs = copyTimestampIterator.getLastTimestampUs();
checkState(lastBufferPresentationTimeUs != C.TIME_UNSET);
@ -746,7 +765,8 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
private void maybeSetStreamOffsetChange(long bufferPresentationTimeUs) {
if (pendingInputStreamOffsetChange) {
CompositingVideoSinkProvider.this.onStreamOffsetChange(
/* bufferPresentationTimeUs= */ bufferPresentationTimeUs,
inputBufferTimestampAdjustmentUs,
bufferPresentationTimeUs,
/* streamOffsetUs= */ inputStreamOffsetUs);
pendingInputStreamOffsetChange = false;
}
@ -850,7 +870,10 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
@Override
public void renderFrame(
long renderTimeNs, long presentationTimeUs, long streamOffsetUs, boolean isFirstFrame) {
long renderTimeNs,
long bufferPresentationTimeUs,
long streamOffsetUs,
boolean isFirstFrame) {
if (isFirstFrame && currentSurfaceAndSize != null) {
for (CompositingVideoSinkProvider.Listener listener : listeners) {
listener.onFirstFrameRendered(CompositingVideoSinkProvider.this);
@ -861,7 +884,7 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
// onVideoSizeChanged is announced after the first frame is available for rendering.
Format format = outputFormat == null ? new Format.Builder().build() : outputFormat;
videoFrameMetadataListener.onVideoFrameAboutToBeRendered(
/* presentationTimeUs= */ presentationTimeUs - streamOffsetUs,
/* presentationTimeUs= */ bufferPresentationTimeUs,
checkStateNotNull(clock).nanoTime(),
format,
/* mediaFormat= */ null);
@ -1010,43 +1033,4 @@ public final class CompositingVideoSinkProvider implements VideoSinkProvider, Vi
}
}
}
/**
 * A {@link TimestampIterator} that wraps another {@link TimestampIterator} and adds an offset to
 * the returned timestamps.
 */
private static class OffsetTimestampIterator implements TimestampIterator {
// The wrapped iterator; all calls delegate to it.
private final TimestampIterator timestampIterator;
// Offset, in the same unit as the wrapped iterator's timestamps, added to each returned value.
private final long offset;
public OffsetTimestampIterator(TimestampIterator timestampIterator, long offset) {
this.timestampIterator = timestampIterator;
this.offset = offset;
}
@Override
public boolean hasNext() {
return timestampIterator.hasNext();
}
@Override
public long next() {
return offset + timestampIterator.next();
}
@Override
public long getLastTimestampUs() {
long last = timestampIterator.getLastTimestampUs();
// C.TIME_UNSET is a sentinel for "unknown" and must not be shifted by the offset.
if (last != C.TIME_UNSET) {
last += offset;
}
return last;
}
@Override
public TimestampIterator copyOf() {
// Copy the underlying iterator too, so iterating the copy doesn't advance this instance.
return new OffsetTimestampIterator(timestampIterator.copyOf(), offset);
}
}
}
}

View file

@ -658,7 +658,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
// Flush the video sink first to ensure it stops reading textures that will be owned by
// MediaCodec once the codec is flushed.
videoSink.flush();
videoSink.setStreamOffsetUs(getOutputStreamOffsetUs());
videoSink.setStreamOffsetAndAdjustmentUs(
getOutputStreamOffsetUs(), getBufferTimestampAdjustmentUs());
super.onPositionReset(positionUs, joining);
videoFrameReleaseControl.reset();
if (joining) {
@ -1352,7 +1353,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
throw createRendererException(
e, e.format, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED);
}
long releaseTimeNs = videoSink.registerInputFrame(presentationTimeUs, isLastBuffer);
long releaseTimeNs =
videoSink.registerInputFrame(
bufferPresentationTimeUs + getBufferTimestampAdjustmentUs(), isLastBuffer);
if (releaseTimeNs == C.TIME_UNSET) {
return false;
}
@ -1384,6 +1388,15 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
}
}
/**
 * Returns the timestamp, in microseconds, that is added to the buffer presentation time (the
 * player decoding position) to get the frame presentation time.
 */
protected long getBufferTimestampAdjustmentUs() {
// TODO - b/333514379: Make effect-enabled effect timestamp start from zero.
return 0;
}
private boolean maybeReleaseFrame(
MediaCodecAdapter codec, int bufferIndex, long presentationTimeUs, Format format) {
long releaseTimeNs = videoFrameReleaseInfo.getReleaseTimeNs();
@ -1461,7 +1474,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
protected void onProcessedStreamChange() {
super.onProcessedStreamChange();
if (shouldUseVideoSink) {
videoSink.setStreamOffsetUs(getOutputStreamOffsetUs());
videoSink.setStreamOffsetAndAdjustmentUs(
getOutputStreamOffsetUs(), getBufferTimestampAdjustmentUs());
} else {
videoFrameReleaseControl.onProcessedStreamChange();
}

View file

@ -186,10 +186,14 @@ public interface VideoSink {
void setPendingVideoEffects(List<Effect> videoEffects);
/**
* Sets the offset, in microseconds, that is added to the video frames presentation timestamps
* from the player.
* Sets the stream offset and buffer time adjustment, in microseconds.
*
* @param streamOffsetUs The offset that is added to the video frames presentation timestamps from
* the player.
* @param bufferTimestampAdjustmentUs The timestamp adjustment to convert the player position to
* the frame presentation timestamp.
*/
void setStreamOffsetUs(long streamOffsetUs);
void setStreamOffsetAndAdjustmentUs(long streamOffsetUs, long bufferTimestampAdjustmentUs);
/** Sets the output surface info. */
void setOutputSurfaceInfo(Surface outputSurface, Size outputResolution);

View file

@ -15,7 +15,6 @@
*/
package androidx.media3.exoplayer.video;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.when;
@ -62,23 +61,6 @@ public final class CompositingVideoSinkProviderTest {
() -> sink.initialize(new Format.Builder().build(), Clock.DEFAULT));
}
@Test
public void setSinkStreamOffsetUs_frameReleaseTimesAreAdjusted()
throws VideoSink.VideoSinkException {
VideoSink videoSink = createCompositingVideoSinkProvider().getSink();
videoSink.initialize(new Format.Builder().build(), Clock.DEFAULT);
videoSink.registerInputStream(
VideoSink.INPUT_TYPE_SURFACE, new Format.Builder().setWidth(640).setHeight(480).build());
assertThat(videoSink.registerInputFrame(/* framePresentationTimeUs= */ 0, false)).isEqualTo(0);
videoSink.setStreamOffsetUs(1_000);
assertThat(videoSink.registerInputFrame(/* framePresentationTimeUs= */ 0, false))
.isEqualTo(1_000_000);
videoSink.setStreamOffsetUs(2_000);
assertThat(videoSink.registerInputFrame(/* framePresentationTimeUs= */ 0, false))
.isEqualTo(2_000_000);
}
private static CompositingVideoSinkProvider createCompositingVideoSinkProvider() {
Context context = ApplicationProvider.getApplicationContext();
return new CompositingVideoSinkProvider.Builder(context, createVideoFrameReleaseControl())

View file

@ -118,67 +118,67 @@ public class CompositionPlayerSeekTest {
ImmutableList<Long> timestampsUsOfOneSequence =
ImmutableList.of(
1000000000000L,
1000000033366L,
1000000066733L,
1000000100100L,
1000000133466L,
1000000166833L,
1000000200200L,
1000000233566L,
1000000266933L,
1000000300300L,
1000000333666L,
1000000367033L,
1000000400400L,
1000000433766L,
1000000467133L,
1000000500500L,
1000000533866L,
1000000567233L,
1000000600600L,
1000000633966L,
1000000667333L,
1000000700700L,
1000000734066L,
1000000767433L,
1000000800800L,
1000000834166L,
1000000867533L,
1000000900900L,
1000000934266L,
1000000967633L,
0L,
33366L,
66733L,
100100L,
133466L,
166833L,
200200L,
233566L,
266933L,
300300L,
333666L,
367033L,
400400L,
433766L,
467133L,
500500L,
533866L,
567233L,
600600L,
633966L,
667333L,
700700L,
734066L,
767433L,
800800L,
834166L,
867533L,
900900L,
934266L,
967633L,
// Second video starts here.
1000001024000L,
1000001057366L,
1000001090733L,
1000001124100L,
1000001157466L,
1000001190833L,
1000001224200L,
1000001257566L,
1000001290933L,
1000001324300L,
1000001357666L,
1000001391033L,
1000001424400L,
1000001457766L,
1000001491133L,
1000001524500L,
1000001557866L,
1000001591233L,
1000001624600L,
1000001657966L,
1000001691333L,
1000001724700L,
1000001758066L,
1000001791433L,
1000001824800L,
1000001858166L,
1000001891533L,
1000001924900L,
1000001958266L,
1000001991633L);
1024000L,
1057366L,
1090733L,
1124100L,
1157466L,
1190833L,
1224200L,
1257566L,
1290933L,
1324300L,
1357666L,
1391033L,
1424400L,
1457766L,
1491133L,
1524500L,
1557866L,
1591233L,
1624600L,
1657966L,
1691333L,
1724700L,
1758066L,
1791433L,
1824800L,
1858166L,
1891533L,
1924900L,
1958266L,
1991633L);
assertThat(inputTimestampRecordingShaderProgram.timestampsUs)
// Seeked after the first playback ends, so the timestamps are repeated twice.
@ -253,52 +253,52 @@ public class CompositionPlayerSeekTest {
ImmutableList<Long> expectedTimestampsUs =
ImmutableList.of(
1000000000000L,
1000000033366L,
1000000066733L,
1000000100100L,
1000000133466L,
1000000166833L,
1000000200200L,
1000000233566L,
1000000266933L,
1000000300300L,
1000000333666L,
1000000367033L,
1000000400400L,
1000000433766L,
1000000467133L,
0L,
33366L,
66733L,
100100L,
133466L,
166833L,
200200L,
233566L,
266933L,
300300L,
333666L,
367033L,
400400L,
433766L,
467133L,
// 15 frames, seek
1000000000000L,
1000000033366L,
1000000066733L,
1000000100100L,
1000000133466L,
1000000166833L,
1000000200200L,
1000000233566L,
1000000266933L,
1000000300300L,
1000000333666L,
1000000367033L,
1000000400400L,
1000000433766L,
1000000467133L,
1000000500500L,
1000000533866L,
1000000567233L,
1000000600600L,
1000000633966L,
1000000667333L,
1000000700700L,
1000000734066L,
1000000767433L,
1000000800800L,
1000000834166L,
1000000867533L,
1000000900900L,
1000000934266L,
1000000967633L,
0L,
33366L,
66733L,
100100L,
133466L,
166833L,
200200L,
233566L,
266933L,
300300L,
333666L,
367033L,
400400L,
433766L,
467133L,
500500L,
533866L,
567233L,
600600L,
633966L,
667333L,
700700L,
734066L,
767433L,
800800L,
834166L,
867533L,
900900L,
934266L,
967633L,
// Second video starts here.
1000001024000L,
1000001057366L,

View file

@ -134,6 +134,25 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return renderers.toArray(new Renderer[0]);
}
// Returns the offset, in microseconds, to add to a buffer timestamp of the media item at
// mediaItemIndex to obtain the corresponding composition timestamp.
private long getOffsetToCompositionTimeUs(int mediaItemIndex, long offsetUs) {
// Reverse engineer how timestamps and offsets are computed with a ConcatenatingMediaSource2
// to compute an offset converting buffer timestamps to composition timestamps.
// startPositionUs is not used because it is equal to offsetUs + clipping start time + seek
// position when seeking from any MediaItem in the playlist to the first MediaItem.
// The offset to convert the sample timestamps to composition time is negative because we need
// to remove the large offset added by ExoPlayer to make sure the decoder doesn't receive any
// negative timestamps. We also need to remove the clipping start position.
long offsetToCompositionTimeUs = -offsetUs;
if (mediaItemIndex == 0) {
// Only the first item's clipping start is folded into the renderer offset here; NOTE(review):
// later items' clipping appears to be handled via getPresentationDurationUs() — confirm.
offsetToCompositionTimeUs -=
sequence.editedMediaItems.get(0).mediaItem.clippingConfiguration.startPositionUs;
}
// Composition time keeps increasing across item boundaries, so accumulate the presentation
// durations of all preceding items.
for (int i = 0; i < mediaItemIndex; i++) {
offsetToCompositionTimeUs += sequence.editedMediaItems.get(i).getPresentationDurationUs();
}
return offsetToCompositionTimeUs;
}
private static final class SequenceAudioRenderer extends MediaCodecAudioRenderer {
private final SequencePlayerRenderersWrapper sequencePlayerRenderersWrapper;
private final AudioGraphInputAudioSink audioSink;
@ -183,28 +202,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// because the super method will call onProcessedStreamChange()
pendingEditedMediaItem =
sequencePlayerRenderersWrapper.sequence.editedMediaItems.get(mediaItemIndex);
// Reverse engineer how timestamps and offsets are computed with a ConcatenatingMediaSource2
// to compute an offset converting buffer timestamps to composition timestamps.
// startPositionUs is not used because it is equal to offsetUs + clipping start time + seek
// position when seeking from any MediaItem in the playlist to the first MediaItem.
// TODO(b/331547894): remove this reverse-engineered logic by moving away from using a
// ConcatenatingMediaSource2.
// The offset to convert the sample timestamps to composition time is negative because we need
// to remove the large offset added by ExoPlayer to make sure the decoder doesn't receive any
// negative timestamps. We also need to remove the clipping start position.
pendingOffsetToCompositionTimeUs = -offsetUs;
if (mediaItemIndex == 0) {
pendingOffsetToCompositionTimeUs -=
pendingEditedMediaItem.mediaItem.clippingConfiguration.startPositionUs;
}
for (int i = 0; i < mediaItemIndex; i++) {
pendingOffsetToCompositionTimeUs +=
sequencePlayerRenderersWrapper
.sequence
.editedMediaItems
.get(i)
.getPresentationDurationUs();
}
pendingOffsetToCompositionTimeUs =
sequencePlayerRenderersWrapper.getOffsetToCompositionTimeUs(mediaItemIndex, offsetUs);
super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
}
@ -237,6 +236,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final SequencePlayerRenderersWrapper sequencePlayerRenderersWrapper;
private final VideoSink videoSink;
@Nullable private ImmutableList<Effect> pendingEffect;
private long offsetToCompositionTimeUs;
public SequenceVideoRenderer(
Context context,
@ -268,13 +268,20 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
throws ExoPlaybackException {
checkState(getTimeline().getWindowCount() == 1);
super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
int mediaItemIndex = getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid);
offsetToCompositionTimeUs =
sequencePlayerRenderersWrapper.getOffsetToCompositionTimeUs(mediaItemIndex, offsetUs);
pendingEffect =
sequencePlayerRenderersWrapper.sequence.editedMediaItems.get(
getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid))
sequencePlayerRenderersWrapper.sequence.editedMediaItems.get(mediaItemIndex)
.effects
.videoEffects;
}
@Override
protected long getBufferTimestampAdjustmentUs() {
return offsetToCompositionTimeUs;
}
@Override
protected void onReadyToRegisterVideoSinkInputStream() {
@Nullable ImmutableList<Effect> pendingEffect = this.pendingEffect;
@ -298,6 +305,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private boolean inputStreamPendingRegistration;
private long streamOffsetUs;
private boolean mayRenderStartOfStream;
private long offsetToCompositionTimeUs;
public SequenceImageRenderer(SequencePlayerRenderersWrapper sequencePlayerRenderersWrapper) {
super(
@ -362,11 +370,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
videoSink.flush();
super.onPositionReset(positionUs, joining);
timestampIterator =
new ConstantRateTimestampIterator(
/* startPositionUs= */ positionUs - streamOffsetUs,
/* endPositionUs= */ checkNotNull(editedMediaItem).getPresentationDurationUs(),
DEFAULT_FRAME_RATE);
timestampIterator = createTimestampIterator(positionUs);
videoFrameReleaseControl.reset();
if (joining) {
videoFrameReleaseControl.join(/* renderNextFrameImmediately= */ false);
@ -395,14 +399,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
checkState(getTimeline().getWindowCount() == 1);
super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
streamOffsetUs = offsetUs;
int mediaItemIndex = getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid);
editedMediaItem =
sequencePlayerRenderersWrapper.sequence.editedMediaItems.get(
getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid));
timestampIterator =
new ConstantRateTimestampIterator(
/* startPositionUs= */ startPositionUs - streamOffsetUs,
/* endPositionUs= */ editedMediaItem.getPresentationDurationUs(),
DEFAULT_FRAME_RATE);
sequencePlayerRenderersWrapper.sequence.editedMediaItems.get(mediaItemIndex);
offsetToCompositionTimeUs =
sequencePlayerRenderersWrapper.getOffsetToCompositionTimeUs(mediaItemIndex, offsetUs);
timestampIterator = createTimestampIterator(/* positionUs= */ startPositionUs);
videoEffects = editedMediaItem.effects.videoEffects;
inputStreamPendingRegistration = true;
}
@ -424,7 +426,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (inputStreamPendingRegistration) {
checkState(streamOffsetUs != C.TIME_UNSET);
videoSink.setPendingVideoEffects(videoEffects);
videoSink.setStreamOffsetUs(streamOffsetUs);
videoSink.setStreamOffsetAndAdjustmentUs(
streamOffsetUs, /* bufferTimestampAdjustmentUs= */ offsetToCompositionTimeUs);
videoSink.registerInputStream(
VideoSink.INPUT_TYPE_BITMAP,
new Format.Builder()
@ -438,5 +441,17 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
return videoSink.queueBitmap(outputImage, checkStateNotNull(timestampIterator));
}
// Builds the iterator that produces the bitmap's frame timestamps, starting at the current
// player position and ending at the end of the edited media item, at DEFAULT_FRAME_RATE.
private ConstantRateTimestampIterator createTimestampIterator(long positionUs) {
// Base of the image's timestamps: the stream offset plus the buffer-to-composition-time
// adjustment. NOTE(review): assumes the sink expects composition-relative bitmap timestamps
// plus the stream offset — confirm against the VideoSink timestamp contract.
long imageBaseTimestampUs = streamOffsetUs + offsetToCompositionTimeUs;
// positionUs includes the stream offset; subtract it to get the position within the image.
long positionWithinImage = positionUs - streamOffsetUs;
long firstBitmapTimeUs = imageBaseTimestampUs + positionWithinImage;
long lastBitmapTimeUs =
imageBaseTimestampUs + checkNotNull(editedMediaItem).getPresentationDurationUs();
return new ConstantRateTimestampIterator(
/* startPositionUs= */ firstBitmapTimeUs,
/* endPositionUs= */ lastBitmapTimeUs,
DEFAULT_FRAME_RATE);
}
}
}

View file

@ -494,7 +494,7 @@ import org.checkerframework.dataflow.qual.Pure;
}
@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
public void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {
// Do nothing.
}